Example #1
def monkeypatch_runner():
    # monkey patch for greenlet support
    # monkey.patch_all() - raises a KeyError in Py 2.7.9, so patch selectively
    from gevent import monkey
    monkey.patch_socket()
    monkey.patch_dns()
    monkey.patch_ssl()
    monkey.patch_os()
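A hedged side note on the selective patching above: with a reasonably recent gevent (>= 1.1, which exposes monkey.is_module_patched), the effect can be checked directly; the module names below are illustrative.

from gevent import monkey

monkey.patch_socket()
monkey.patch_ssl()
monkey.patch_os()

# socket and ssl report True; threading reports False because patch_thread() was never called
for name in ('socket', 'ssl', 'threading'):
    print(name, monkey.is_module_patched(name))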
Example #2
    def init(self):
        import options
        from UnityEngine import Debug

        L = lambda s: Debug.Log("PyWrap: " + s)
        L("init")

        self.events = []

        # should set them
        options.no_update
        options.show_hidden_mode

        options.freeplay = False

        if options.no_update:
            import autoupdate
            autoupdate.Autoupdate = autoupdate.DummyAutoupdate

        L("before gevent")
        from gevent import monkey
        monkey.patch_socket()
        monkey.patch_os()
        monkey.patch_select()
        L("after gevent")

        from game import autoenv
        autoenv.init('Client')

        import thb.ui.ui_meta  # noqa, init ui_meta

        from client.core.executive import Executive
        self.executive = ExecutiveWrapper(Executive, self)
Example #3
def test_d():
    from gevent import monkey
    import urllib2
    import simplejson as json
    monkey.patch_socket()

    def fetch(pid):
        response = urllib2.urlopen('http://json-time.appspot.com/time.json')
        result = response.read()
        json_result = json.loads(result)
        datetime = json_result['datetime']

        print('Process %s: %s' % (pid, datetime))
        return json_result['datetime']

    def synchronous():
        for i in range(1, 10):
            fetch(i)

    def asynchronous():
        import gevent

        threads = []
        for i in range(1, 10):
            threads.append(gevent.spawn(fetch, i))
        gevent.joinall(threads)

    print('Synchronous:')
    synchronous()

    print('Asynchronous:')
    asynchronous()
Example #4
    def run(self):
        if settings.NEED_PATCH_SOCKET_SSL:
            # served via gevent.pywsgi
            from gevent.monkey import patch_socket, patch_ssl
            patch_socket()
            # after patching socket, https requests will fail unless ssl is patched as well
            patch_ssl()

        if not settings.NEED_GEVENT_THREADPOOL:
            def sync(func, *args, **kwargs):
                return func(*args, **kwargs)
            self.async_execute = sync

        self._listen_sock = socket(family=AF_INET)
        self._listen_sock.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
        self._listen_sock.bind((settings.HOST, settings.PORT))
        self._listen_sock.listen(2048)
        self._listen_sock.setblocking(0)

        if settings.WORKERS == 1:
            # only one worker: run in a single process
            self.serve_forever()
        else:
            self.fork_workers(settings.WORKERS or settings.CPUS + 1)
            self.parent_execute()
Example #5
	def __init__(self, count=10):
		""" init class """
		monkey.patch_socket()
		self._index = 0
		self._count = count
		self._pool = pool.Pool(self._count)
		self._event = gevent.event.Event()
		gevent.signal(signal.SIGTERM, self.quit)
Example #6
 def look_up_topics(self, topics, pool_size=4):
     from gevent.pool import Pool
     if self.PATCH_GEVENT:
         from gevent import monkey
         monkey.patch_socket()
     
     pool = Pool(pool_size)
     for result in pool.imap_unordered(self._lookup_topic, topics):
         yield result 
Example #7
    def __init__(self,
                 cities=('北京', '上海', '广州', '深圳', '杭州'),
                 positions=('python', 'java', 'php', 'c', 'c++')):
        monkey.patch_socket()

        self.task_queue = Queue()
        self.visited_pages = []
        self.is_continue = True

        for p in positions:
            for c in cities:
                self.task_queue.put((p, c, 1))
Example #8
def patch_socket(patch_gevent=False):
	""" patch socket """

	kson.patch_socket()

	if patch_gevent:
		from gevent import monkey
		monkey.patch_socket()

		from kson import network
		from gevent.socket import socket

		socket.recvbytes = network._recvbytes
		socket.recvobj = network._gzip_recvobj
		socket.sendobj = network._gzip_sendobj
Example #9
def e_gevent():
	from gevent import monkey; monkey.patch_socket()
	import gevent

	def f(n):
		for i in range(n):
			print gevent.getcurrent(), i
			gevent.sleep(0)

	g1 = gevent.spawn(f, 5)
	g2 = gevent.spawn(f, 5)
	g3 = gevent.spawn(f, 5)
	g1.join()
	g2.join()
	g3.join()
Example #10
 def getUserInfo(self, oauth2Session, token_url, client_secret, code, getUrl):
     # temporary workaround (2016/9/28): ValueError: do_handshake_on_connect should not be specified for non-blocking sockets
     monkey.patch_socket()
     monkey.patch_ssl()
     oauth2Session.fetch_token(token_url=token_url,
                               client_secret=client_secret,
                               code=code)
     # after the FB API revision the request format must change to retrieve the email; other providers may differ
     if 'https://www.googleapis.com' in getUrl:
         getUser = oauth2Session.get(getUrl)
     elif 'https://graph.facebook' in getUrl:
         getUser = oauth2Session.get('%s%s' % (getUrl, 'fields=name,email'))
     else:
         getUser = oauth2Session.get(getUrl)
     return getUser
Example #11
    def _initialize_collection(self):
        try:
            from gevent import monkey
            monkey.patch_socket()
        except ImportError:
            pass

        if self._rsname:
            self.connection = pymongo.MongoReplicaSetClient(self._rshosts, replicaSet=self._rsname,
                read_preference=self._read_preference, socketTimeoutMS=self._socket_timeout_ms,
                connectTimeoutMS=self._connect_timeout_ms, tag_sets=self._tag_sets)
        else:
            self.connection = pymongo.Connection(self._host, self._port)

        self._db = self.connection[self._database]
        if self._user and self._password:
            self._db.authenticate(self._user, self._password)
        self._coll= self._db[self._collection]
Example #12
    def setUp(self):
        if self.use_greenlets:
            if not has_gevent:
                raise SkipTest("Gevent not installed")

            # Note we don't do patch_thread() or patch_all() - we're
            # testing here that patch_thread() is unnecessary for
            # the connection pool to work properly.
            monkey.patch_socket()

        self.c = self.get_connection(auto_start_request=False)

        # reset the db
        db = self.c[DB]
        db.unique.drop()
        db.test.drop()
        db.unique.insert({"_id": "jesse"}, safe=True)

        db.test.insert([{} for i in range(10)], safe=True)
Example #13
    def __init__(self, session):

        monkey.patch_socket() 
        import gevent_zeromq
        gevent_zeromq.monkey_patch()
        # we do use greenlets, but only patch sock stuff
        # other stuff messes up the

        config = session.arduino 
        boardglob = config['board']
        boards = glob(boardglob)
        if not len(boards):
            raise Exception("No Arduino found")
        
        self.board = Arduino(boards[0])
        #self.iterator = util.Iterator(self.board)
        #self.iterator.daemon = True
        #self.iterator.start()

        #initialize servo objects
        self.servos = {}
        if "servos" in config:
            for servo in config["servos"]:
                self.servos[servo['name']] = Servo(servo['pin'], self.board)

        #initialize light objects
        self.digitalouts = {}
        if "digitalouts" in config:
            for do in config["digitalouts"]:
                self.digitalouts[do['name']] = DigitalOut(do['pin'], self.board)

        if "digitalouts" in config or "servos" in config:
            self.subsock = ChannelManager().subscribe("ControlOutput/")

        self.buttons = []
        if "buttons" in config:
            for button in config["buttons"]:
                self.buttons.append(Button(button['pin'], button['message'], self.board))

        self.potentiometers = []
        if "potentiometers" in config:
            for pot in config["potentiometers"]:
                self.potentiometers.append(Potentiometer(pot['pin'], pot['name'], self.board))
Example #14
    def init(self):
        import options
        from UnityEngine import Debug

        L = lambda s: Debug.Log("PyWrap: " + s)
        L("init")

        self.events = []

        # should set them
        options.no_update
        options.no_crashreport
        options.show_hidden_mode

        options.freeplay = False

        if options.no_update:
            import autoupdate
            autoupdate.Autoupdate = autoupdate.DummyAutoupdate

        L("before gevent")
        from gevent import monkey
        monkey.patch_socket()
        monkey.patch_os()
        monkey.patch_select()
        L("after gevent")

        from game import autoenv
        autoenv.init('Client')

        import gamepack.thb.ui.ui_meta  # noqa, init ui_meta

        # For debug
        @gevent.spawn
        def beat():
            while True:
                gevent.sleep(1)
                # self.events.append(("tick",))

        from client.core.executive import Executive
        self.executive = ExecutiveWrapper(Executive, self)
Example #15
    def start(self):
        from multiprocessing import Process
        from gevent import monkey
        monkey.patch_socket()
        monkey.patch_os()
        from gevent.wsgi import WSGIServer
        server = WSGIServer((self.host, self.port), self.handler)
        server.pre_start()
        def serve_forever():
            logger.info('starting server')
            try:
                server.start_accepting()

                try:
                    server._stopped_event.wait()
                except:
                    raise
            except KeyboardInterrupt:
                pass                
        for i in range(2):
            Process(target=serve_forever, args=tuple()).start()
        serve_forever()
Example #16
    def _initialize_collection(self):
        try:
            from gevent import monkey
            monkey.patch_socket()
        except ImportError:
            pass

        self.connection = pymongo.MongoClient(connect=False, host=self._host, replicaset=self._rsname,
            sockettimeoutms=self._socket_timeout_ms, connecttimeoutms=self._connect_timeout_ms,
            serverSelectionTimeoutMS=self._server_selection_timeout_ms,read_preference=self._read_preference)

        self._db = self.connection[self._database]
        if self._user and self._password:
            self._db.authenticate(self._user, self._password)
        if pymongo.version_tuple[0] < 3:
            self._coll = self._db[self._collection]
        else:
            self._coll = self._db.get_collection(self._collection)
            if not self._coll:
                if self._compression:
                    self._coll = self._db.create_collection(self._collection,
                                                            storageEngine={'wiredTiger':
                                                                            {'configString': 'block_compressor=none'}})
                else:
                    self._coll = self._db.create_collection(self._collection)

        # create indexes if they do not exist
        if isinstance(self._collection_indexes, list) and len(self._collection_indexes):
            indexes_info = {}
            try:
                indexes_info = self._coll.index_information()
            except:
                pass
            for index_desc in self._collection_indexes:
                index_name = index_desc['NAME']
                index_description = index_desc['INDEX_DESCRIPTION']
                if index_name not in indexes_info:
                    self._coll.create_index(index_description)
Example #17
    def setUp(self):
        if self.use_greenlets:
            if not has_gevent:
                raise SkipTest("Gevent not installed")

            # Note we don't do patch_thread() or patch_all() - we're
            # testing here that patch_thread() is unnecessary for
            # the connection pool to work properly.
            monkey.patch_socket()

        self.c = self.get_connection(auto_start_request=False)

        # reset the db
        db = self.c[DB]
        db.unique.drop()
        db.test.drop()
        db.unique.insert({"_id": "jesse"})

        # In tests like test_max_pool_size, we start some threads that will
        # perform simultaneous queries, forcing the pool to give out many
        # sockets. We need enough docs here that some threads are still in
        # progress when other threads start.
        db.test.insert([{} for i in range(3000)])
Example #18
    def test_simple_urlfetch():
        ''' test: simple urlfetch
        '''
        import urllib2
        from gevent import monkey
        monkey.patch_socket()
        
        def simple_urlfetch(url):
            return "%s:%d" % (url, len(urllib2.urlopen(url).read()))

        print 'test_simple_urlfetch:',
        queue = Queue()
        queue.put('http://www.google.com')
        queue.put('http://www.yahoo.com')
        queue.put('http://www.yandex.ru')
        queue.put('http://www.wired.com')
        queue.put('http://python.org/')
        queue.put('http://www.ubuntu.com/')
        queue.put('http://www.apple.com/')
        
        imap = imap_nonblocking(3, simple_urlfetch, queue)
        result = [r for r in imap if r is not None]
        assert len(result) == 7, result
        print 'OK'
Example #19
def on_starting(server):
    # use server hook to patch socket to allow worker reloading
    from gevent import monkey
    monkey.patch_socket()
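The hook above has the shape of a gunicorn on_starting server hook; a hedged sketch of how it might be wired up, assuming a gunicorn config file (gunicorn.conf.py is a hypothetical name) and an illustrative app module myapp:app:

# gunicorn.conf.py -- run with: gunicorn -c gunicorn.conf.py -k gevent myapp:app
bind = '0.0.0.0:8000'
worker_class = 'gevent'

def on_starting(server):
    # patch the socket module in the master process so workers can be reloaded
    from gevent import monkey
    monkey.patch_socket()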
Example #20
    def download(self, concurrent=False, source=None, formats=None, glob_pattern=None,
                 dry_run=False, verbose=False, ignore_existing=False):
        """Download the entire item into the current working directory.

        :type concurrent: bool
        :param concurrent: Download files concurrently if ``True``.

        :type source: str
        :param source: Only download files matching given source.

        :type formats: str
        :param formats: Only download files matching the given Formats.

        :type glob_pattern: str
        :param glob_pattern: Only download files matching the given glob
                             pattern

        :type ignore_existing: bool
        :param ignore_existing: Overwrite local files if they already
                                exist.

        :rtype: bool
        :returns: True if if files have been downloaded successfully.

        """
        if concurrent:
            try:
                from gevent import monkey
                monkey.patch_socket()
                from gevent.pool import Pool
                pool = Pool()
            except ImportError:
                raise ImportError(
                """No module named gevent

                Downloading files concurrently requires the gevent networking library.
                gevent and all of its dependencies can be installed with pip:

                \tpip install cython git+git://github.com/surfly/[email protected]#egg=gevent

                """)

        files = self.files()
        if source:
            if type(source) == str:
                source = [source]
            files = [f for f in files if f.source in source]
        if formats:
            if type(formats) == str:
                formats = [formats]
            files = [f for f in files if f.format in formats]
        if glob_pattern:
            files = [f for f in files if fnmatch(f.name, glob_pattern)]

        for f in files:
            fname = f.name.encode('utf-8')
            path = os.path.join(self.identifier, fname)
            if dry_run:
                stdout.write(f.url + '\n')
                continue
            if verbose:
                stdout.write(' downloading: {0}\n'.format(fname))
            if concurrent:
                pool.spawn(f.download, path, ignore_existing=ignore_existing)
            else:
                f.download(path, ignore_existing=ignore_existing)
        if concurrent:
            pool.join()
        return True
Example #21
"""
Usage: Start the script, then use your web browser, wget, etc. to visit:
    http://localhost:8080/world/
    http://localhost:8080/world2/
"""
from gevent.monkey import patch_socket
patch_socket()
import logging
import gevent
import webapp2
from gevent import wsgi
from gevent.event import AsyncResult
from webob import exc


class EventLoopRequestContext(webapp2.RequestContext):
    """
    Override webapp2.RequestContext in order to avoid setting thread-local variables, since we're always working
    in a single thread with gevent.
    """
    def __enter__(self):
        """
        Same as in webapp2.RequestContext, except that thread-local variables are not set.
        """
        request = self.app.request_class(self.environ)
        response = self.app.response_class()
        # Make active app and response available through the request object.
        request.app = self.app
        request.response = response
        return request, response
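A hedged sketch of how this context class might be plugged into the app from the usage note, assuming webapp2.WSGIApplication honours a request_context_class attribute; WorldHandler is purely illustrative:

class WorldHandler(webapp2.RequestHandler):
    def get(self):
        self.response.write('hello world')

app = webapp2.WSGIApplication([('/world/', WorldHandler)])
app.request_context_class = EventLoopRequestContext  # avoid thread-local bookkeeping under gevent

wsgi.WSGIServer(('localhost', 8080), app).serve_forever()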
Example #22
File: uurl.py  Project: geekbuntu/uurl
 def run(self, handler):
     from gevent import monkey
     monkey.patch_socket()
     from gevent.wsgi import WSGIServer
     WSGIServer((self.host, self.port), handler).serve_forever()
Example #23
from collections import OrderedDict
from binascii import unhexlify, hexlify
from datetime import datetime
import hashlib
from sockbasic import *
from random import randint
import gevent
from gevent import monkey, socket
from gevent.pool import Pool
from gevent.queue import Queue, Empty
import multiprocessing
from ftplib import FTP

#from multiprocessing import Queue
#from multiprocessing.queues import Empty
monkey.patch_socket(dns=False, aggressive=False)
monkey.patch_time()
monkey.patch_os()
monkey.patch_thread()
monkey.patch_ssl()
#monkey.patch_all(socket=True,dns=False,time=True,select=False,thread=True,os=True,ssl=False,httplib=False,subprocess=False,aggressive=True)
import threading

#FTP_HOST='192.168.25.105'
FTP_HOST = 'ftp.jieli.net'


def logger_worker(queue, logger):
    #n = time.time() + 120
    while 1:
        #if time.time() > n:
Example #24
 def __new__(cls, *args, **kwargs):
     if not hasattr(cls, '_MongoClient'):
         from gevent import monkey; monkey.patch_socket()
         from pymongo import MongoClient
         cls._MongoClient = MongoClient
     return super(ProfileSink, cls).__new__(cls, *args, **kwargs)
Example #25
# -*- encoding: UTF-8 -*-

import argparse
import sys
import os
import itertools

import gevent
from gevent import monkey; monkey.patch_socket()
from gevent import socket

#from twisted.internet.protocol import Protocol, Factory
#from twisted.internet import reactor


import handler
import screen
import db

# Parse program arguments
parser = argparse.ArgumentParser(description='ptt BBS server daemon.')

parser.add_argument('-d', '--daemon', action='store_true', help='Launch in Daemon mode')
parser.add_argument('-t', '--tunnel', help='Tunnel Mode', default='telnet')
parser.add_argument('-p', '--port', help='Listen port', type=int, default='9090')
parser.add_argument('-b', '--base', help='Host url', default='127.0.0.1')
#parser.add_argument('-e', '--encode', help='Character Encoding', default='big5')

args = parser.parse_args()

## Telnet Commands (not complete)
Example #26
 def __init__(self, db):
     monkey.patch_socket()
     super(GeventConsumer, self).__init__(db)
Example #27
 def run(self):
     from gevent.monkey import patch_socket, patch_ssl
     patch_socket()
     # after patching socket, https requests will fail unless ssl is patched as well
     patch_ssl()
     self._engine.run()
Example #28
 def __init__(self):
     monkey.patch_socket()
     self._event = gevent.event.Event()
     gevent.signal(signal.SIGTERM, self.quit)
Example #29
    def download(self, concurrent=False, source=None, formats=None, glob_pattern=None,
                 ignore_existing=False):
        """Download the entire item into the current working directory.

        :type concurrent: bool
        :param concurrent: Download files concurrently if ``True``.

        :type source: str
        :param source: Only download files matching given source.

        :type formats: str
        :param formats: Only download files matching the given Formats.

        :type glob_pattern: str
        :param glob_pattern: Only download files matching the given glob
                             pattern

        :type ignore_existing: bool
        :param ignore_existing: Overwrite local files if they already
                                exist.

        :rtype: bool
        :returns: True if files have been downloaded successfully.

        """
        if concurrent:
            try:
                from gevent import monkey
                monkey.patch_socket()
                from gevent.pool import Pool
                pool = Pool()
            except ImportError:
                raise ImportError(
                """No module named gevent

                Downloading files concurrently requires the gevent networking library.
                gevent and all of its dependencies can be installed with pip:

                \tpip install cython git+git://github.com/surfly/[email protected]#egg=gevent

                """)

        files = self.files()
        if source:
            if type(source) == str:
                source = [source]
            files = [f for f in files if f.source in source]
        if formats:
            if type(formats) == str:
                formats = [formats]
            files = [f for f in files if f.format in formats]
        if glob_pattern:
            files = [f for f in files if fnmatch(f.name, glob_pattern)]

        for f in files:
            fname = f.name.encode('utf-8')
            path = os.path.join(self.identifier, fname)
            stdout.write('downloading: {0}\n'.format(fname))
            if concurrent:
                pool.spawn(f.download, path, ignore_existing=ignore_existing)
            else:
                f.download(path, ignore_existing=ignore_existing)
        if concurrent:
            pool.join()
        return True
Example #30
from gevent import monkey
monkey.patch_socket()  # monkey-patch the standard socket library; its classes and methods become non-blocking
import gevent
import socket

urls = ['www.pronhub.com', 'www.gevent.org', 'www.python.org']
jobs = [gevent.spawn(socket.gethostbyname, url) for url in urls]
gevent.joinall(jobs, timeout=5)

print([job.value for job in jobs])
'''
The first line above monkey-patches the standard socket library; afterwards the classes and
methods in socket are replaced with non-blocking versions, and no other code needs to change,
which is where the efficiency of coroutines really shows. Other Python standard libraries can
also block; gevent provides "monkey.patch_all()" to replace them all:
from gevent import monkey; monkey.patch_all()
'''
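As a hedged follow-up to that note, the same DNS-lookup demo could rely on patch_all() instead of patch_socket(); this sketch is not part of the original snippet:

from gevent import monkey; monkey.patch_all()  # patch every supported stdlib module
import gevent
import socket

urls = ['www.gevent.org', 'www.python.org']
jobs = [gevent.spawn(socket.gethostbyname, url) for url in urls]
gevent.joinall(jobs, timeout=5)
print([job.value for job in jobs])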
Example #31
#!/usr/bin/env python3
from gevent.monkey import patch_socket, patch_ssl; patch_socket(); patch_ssl()
from aiohttp import ClientSession
from datetime import datetime
from logging import getLogger, Formatter, StreamHandler, FileHandler, DEBUG, ERROR
from os import listdir
from os.path import isfile, join
from sys import stderr
from traceback import print_exc
from time import sleep

from discord import ClientException
from discord.ext.commands import Bot, when_mentioned_or
from steam.enums import EResult
from steam.client import SteamClient
from steam import guard
from Login_details import preferences, sensitive_details


class AutoCord(Bot):
    def __init__(self):
        super().__init__(command_prefix=when_mentioned_or(preferences.command_prefix), case_insensitive=True,
                         description='**tf2-autocord** is a Discord bot that manages your tf2automatic bot. As it '
                                     'sends your Steam messages through Discord by logging into to your Steam '
                                     'account, then it will then monitor Steam chat messages from tf2automatic then '
                                     'send them to your Discord bot.')

    async def on_connect(self):
        bot.dsdone = True

    async def setup(self, bot):
Example #32
 def __init__(self, db):
     monkey.patch_socket()
     super(GeventConsumer, self).__init__(db)
Example #33
# Multiprocessing / multithreading
# ==============================================================
import threading
from threading import Thread

import multiprocessing
from multiprocessing import Pool, Process

from concurrent.futures import ThreadPoolExecutor
from concurrent.futures import ProcessPoolExecutor

# pip install gevent
import gevent
from gevent.monkey import patch_socket
from gevent.pool import Pool
patch_socket()

# pip install tomorrow
import tomorrow
from tomorrow import threads

import asyncio
import aiohttp
import asynchat


# Logging
# ==============================================================
import logging

Example #34
File: __init__.py  Project: Justxu/codap
These are some utility classes I built for improving response time in a web service.
I used this to simplify making sequential requests to data storage (services or databases)
concurrent so that there would be less IO wait. Warning: more traditional RDBMSes may have inverse
performance compared to caches or datastores that are distributed.

"""
import sys


try:
    # Prefer gevent as it would be fastest
    from gevent import spawn
    from gevent.queue import Queue
    from gevent import monkey
    monkey.patch_socket()  # Required if we want IO to be concurrent
except:
    try:
        # Eventlet we are also fans of so that would be great
        from eventlet import spawn
        from eventlet.queue import Queue
        import eventlet
        eventlet.monkey_patch(all=True)  # To support concurrent IO

    except:
        # Thread worst case but this will not scale well at all
        if sys.version_info[0] == 3:  # Python 3
            from queue import Queue as ThreadQueue
        else:
            from Queue import Queue as ThreadQueue
        from threading import Thread
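The docstring above explains the intent (turn sequential storage calls into concurrent ones to cut IO wait); a minimal hedged sketch of how the imported spawn/Queue pair might be used for that fan-out, assuming the gevent or eventlet branch was taken and with a caller-supplied fetch function:

def fetch_all(keys, fetch):
    # fan out blocking fetch(key) calls; with the socket module patched they overlap on IO
    results = Queue()
    for key in keys:
        spawn(lambda k=key: results.put((k, fetch(k))))
    return dict(results.get() for _ in keys)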
Example #35
from gevent import sleep
from gevent.pool import Pool
from gevent.queue import Queue
from gevent import monkey
monkey.patch_socket()
pool = Pool(20)
qu = Queue()


class PageBase(object):
    def __init__(self):
        self.delay = 30


class CreateTree(PageBase):
    # create the tree
    name = "创建树"

    def process(self, stream):
        '''Asynchronously build the document tree
        :param stream: a generator; each element is an actual url
        :return: the tree object and its url/params
        :bug: this method does not fully meet the requirements; it blocks at startup and is unstable.
        '''
        def _(obj):
            attribute = 'url' if isinstance(obj, str) else 'params'
            setattr(crawler, attribute, obj)
            t = crawler.crawl()
            if t:
                qu.put((t, obj))
            sleep(self.delay / 1000)
Example #36
#!/usr/bin/python
# -*- coding: utf-8 -*-

__description__ = 'Malcom - Malware communications analyzer'
__author__ = '@tomchop_'
__version__ = '1.3 alpha'
__license__ = "GPL"

# patch threads
from gevent import monkey; monkey.patch_socket(dns=False);

# system
import os, datetime, time, sys, signal, argparse, re, pickle
import netifaces as ni

# db
from pymongo import MongoClient

# json / bson
from bson.objectid import ObjectId
from bson.json_util import dumps, loads

import json

# flask stuff
from werkzeug import secure_filename
from flask import Flask, request, render_template, redirect, url_for, g, make_response, abort, flash, send_from_directory, Response, session
from flask.ext.login import LoginManager, login_user, login_required, logout_user, current_user
from functools import wraps

# websockets / WSGI
Example #37
#!/usr/bin/python
# -*- coding: utf-8 -*-

__description__ = 'Malcom - Malware communications analyzer'
__author__ = '@tomchop_'
__version__ = '1.3 alpha'
__license__ = "GPL"

# patch threads
from gevent import monkey; monkey.patch_socket(dns=False);

# system
import os, datetime, time, sys, signal, argparse, re, pickle
import netifaces as ni

# db
from pymongo import MongoClient

# json / bson
from bson.objectid import ObjectId
from bson.json_util import dumps, loads

import json

# flask stuff
from werkzeug import secure_filename
from flask import Flask, request, render_template, redirect, url_for, g, make_response, abort, flash, send_from_directory, Response, session
from flask.ext.login import LoginManager, login_user, login_required, logout_user, current_user
from functools import wraps

# websockets / WSGI
Example #38
from gevent import monkey; monkey.patch_socket()
import gevent
from gevent.pool import Pool
import json
import urllib2
import sys
import time

def do_write(data,i):
    #u = urllib2.urlopen('http://localhost:8080/api/update/sh', data).read()
    u = urllib2.urlopen('http://the.open-budget.org.il/api/update/sh', data).read()
    print u,i

if __name__=="__main__":
    pool = Pool(10)
    lines = []
    years = [ int(x) for x in sys.argv[1:] ]
    i = 0
    for line in file("search.json"):
        lines.append(line.strip())
        if len(lines) == 100:
            pool.spawn(do_write, "\n".join(lines),i)
            lines = []
        i+=1
    if len(lines)>0:
        do_write("\n".join(lines),i)
    pool.join()
Example #39
import argparse
from builtins import map
from builtins import object
# from builtins import range
from builtins import str
import json
import os
import signal
import socket
import sys
import time
import traceback

from cfgm_common.zkclient import ZookeeperClient
from gevent import monkey
monkey.patch_socket()  # noqa
from gevent.greenlet import Greenlet
from gevent.pool import Pool
import jsonschema
from past.builtins import basestring
import subprocess32
from vnc_api.vnc_api import VncApi

from job_manager.job_exception import JobException
from job_manager.job_handler import JobHandler
from job_manager.job_log_utils import JobLogUtils
from job_manager.job_messages import MsgBundle
from job_manager.job_result_handler import JobResultHandler
from job_manager.job_utils import JobFileWrite, JobStatus, JobUtils
from job_manager.sandesh_utils import SandeshUtils
Example #40
 def init_pool(self, worker_count):
     monkey.patch_socket()
     return Pool(worker_count)
Example #41
def start_client():
    import ctypes
    try:
        ctypes.cdll.avbin  # force avbin load
    except:
        pass

    import logging
    import os
    import argparse
    import utils.logging

    parser = argparse.ArgumentParser(prog=sys.argv[0])
    parser.add_argument('--no-update', action='store_true')
    parser.add_argument('--with-gl-errcheck', action='store_true')
    parser.add_argument('--freeplay', action='store_true')
    parser.add_argument('--fastjoin', action='store_true')
    parser.add_argument('--dump-gameobj', action='store_true')
    parser.add_argument('--log', default='INFO')
    parser.add_argument('--color-log', action='store_true')
    parser.add_argument('--show-hidden-modes', action='store_true')

    options = parser.parse_args()

    import options as opmodule
    opmodule.options = options

    IS_PROTON = hasattr(os, 'uname') and os.uname()[:2] == ('Linux', 'Proton')

    import settings
    utils.logging.init(options.log.upper(), settings.SENTRY_DSN, IS_PROTON or options.color_log)
    utils.logging.patch_gevent_hub_print_exception()

    if options.no_update:
        import autoupdate
        autoupdate.Autoupdate = autoupdate.DummyAutoupdate

    log = logging.getLogger('start_client')

    from gevent import monkey
    monkey.patch_socket()
    monkey.patch_os()
    monkey.patch_select()
    monkey.patch_ssl()

    from game import autoenv
    autoenv.init('Client')

    import pyglet

    pyglet.options['shadow_window'] = False

    if not options.with_gl_errcheck:
        pyglet.options['debug_gl'] = False

    from pyglet.gl import gl_info
    if gl_info.get_renderer() == 'GDI Generic':
        ctypes.windll.user32.MessageBoxW(
            0,
            u'你好像没有安装显卡驱动……?这样游戏是跑不起来的。快去安装!',
            u'需要显卡驱动',
            16,
        )
        sys.exit(0)

    if sys.platform.startswith('linux') and options.dump_gameobj:
        import atexit
        import game
        atexit.register(game.GameObjectMeta._dump_gameobject_hierarchy)
        atexit.register(game.EventHandler._dump_eh_dependency_graph)

    from client.ui.entry import start_ui

    try:
        start_ui()
    except KeyboardInterrupt:
        import pyglet
        pyglet.app.exit()
        raise
    except:
        import pyglet
        pyglet.app.exit()

        if options.fastjoin:
            import pdb
            pdb.post_mortem()

        log.exception(u'UI线程崩溃,正在报告bug,请稍等下……')
        from utils.stats import stats
        stats({'event': 'crash'})

        raise
"""
    gevent 协程模块 实现 tcp server
    思路: 1. 将每个客户端的处理设置为协程函数
          2. 让 socket 模块下的阻塞可以触发协程跳转

"""
import gevent
from gevent import monkey

monkey.patch_socket()  # apply the patch so socket calls stop blocking
from socket import *


def handle(c):
    while True:
        data = c.recv(1024).decode()
        if not data:
            break
        print(data)
        c.send(b"OK")


# create the listening socket
s = socket()
s.setsockopt(SOL_SOCKET, SO_REUSEADDR, 1)
s.bind(("0.0.0.0", 8888))
s.listen(3)

# accept client connections in a loop
while True:
    c, addr = s.accept()
    gevent.spawn(handle, c)  # hand each client to its own coroutine
Example #43
from command_server import *
from net_package import NetPackage
from net_communicator import NetCommunicator
import socket
from struct import *
import time
import lib.tkutil as tkutil
import threading

import gevent
from gevent import monkey
monkey.patch_socket()  # should be careful to use


class ClientConnection:
    """
    client connection model
    """
    MAX_NO_RESPONSE = 120  # max seconds with no response from client before disconnect it

    def __init__(self, r_ip, r_port, sock_c=None):
        self.sock_c = sock_c
        self.seq = 0
        self.remote_ip = r_ip
        self.remote_port = r_port
        self.last_package_time = time.time()
        self.at_room = -1


class GateServerBase(CommandServer):
Example #44
def mesos_cpu_metrics_provider(
    marathon_service_config, marathon_tasks, mesos_tasks, log_utilization_data={},
    noop=False, **kwargs,
):
    """
    Gets the mean cpu utilization of a service across all of its tasks.

    :param marathon_service_config: the MarathonServiceConfig to get data from
    :param marathon_tasks: Marathon tasks to get data from
    :param mesos_tasks: Mesos tasks to get data from
    :param log_utilization_data: A dict used to transfer utilization data to autoscale_marathon_instance()

    :returns: the service's mean utilization, from 0 to 1
    """

    autoscaling_root = compose_autoscaling_zookeeper_root(
        service=marathon_service_config.service,
        instance=marathon_service_config.instance,
    )
    zk_last_time_path = '%s/cpu_last_time' % autoscaling_root
    zk_last_cpu_data = '%s/cpu_data' % autoscaling_root

    with ZookeeperPool() as zk:
        try:
            last_time = zk.get(zk_last_time_path)[0].decode('utf8')
            last_cpu_data = zk.get(zk_last_cpu_data)[0].decode('utf8')
            log_utilization_data[last_time] = last_cpu_data
            last_time = float(last_time)
            last_cpu_data = (datum for datum in last_cpu_data.split(',') if datum)
        except NoNodeError:
            last_time = 0.0
            last_cpu_data = []

    monkey.patch_socket()
    jobs = [gevent.spawn(task.stats_callable) for task in mesos_tasks]
    gevent.joinall(jobs, timeout=60)
    mesos_tasks = dict(zip([task['id'] for task in mesos_tasks], [job.value for job in jobs]))

    current_time = int(datetime.now().strftime('%s'))
    time_delta = current_time - last_time

    mesos_cpu_data = {}
    for task_id, stats in mesos_tasks.items():
        if stats is not None:
            try:
                utime = float(stats['cpus_user_time_secs'])
                stime = float(stats['cpus_system_time_secs'])
                limit = float(stats['cpus_limit']) - .1
                mesos_cpu_data[task_id] = (stime + utime) / limit
            except KeyError:
                pass

    if not mesos_cpu_data:
        raise MetricsProviderNoDataError("Couldn't get any cpu data from Mesos")

    cpu_data_csv = ','.join('%s:%s' % (cpu_seconds, task_id) for task_id, cpu_seconds in mesos_cpu_data.items())
    log_utilization_data[str(current_time)] = cpu_data_csv

    if not noop:
        with ZookeeperPool() as zk:
            zk.ensure_path(zk_last_cpu_data)
            zk.ensure_path(zk_last_time_path)
            zk.set(zk_last_cpu_data, str(cpu_data_csv).encode('utf8'))
            zk.set(zk_last_time_path, str(current_time).encode('utf8'))

    utilization = {}
    for datum in last_cpu_data:
        last_cpu_seconds, task_id = datum.split(':')
        if task_id in mesos_cpu_data:
            utilization[task_id] = (mesos_cpu_data[task_id] - float(last_cpu_seconds)) / time_delta

    if not utilization:
        raise MetricsProviderNoDataError("""The mesos_cpu metrics provider doesn't have Zookeeper data for this service.
                                         This is expected for its first run.""")

    task_utilization = utilization.values()
    mean_utilization = mean(task_utilization)

    return mean_utilization
Example #45
 def setUp(self):
     monkey.patch_socket()
     core.case.subscription.subscriptions = {}
     case_database.initialize()
Example #46
    def download(self, concurrent=None, source=None, formats=None, glob_pattern=None,
                 dry_run=None, verbose=None, ignore_existing=None, checksum=None,
                 destdir=None, no_directory=None):
        """Download the entire item into the current working directory.

        :type concurrent: bool
        :param concurrent: Download files concurrently if ``True``.

        :type source: str
        :param source: Only download files matching given source.

        :type formats: str
        :param formats: Only download files matching the given Formats.

        :type glob_pattern: str
        :param glob_pattern: Only download files matching the given glob
                             pattern

        :type ignore_existing: bool
        :param ignore_existing: Overwrite local files if they already
                                exist.

        :type checksum: bool
        :param checksum: Skip downloading file based on checksum.

        :type no_directory: bool
        :param no_directory: Download files to current working
                             directory rather than creating an item
                             directory.

        :rtype: bool
        :returns: True if files have been downloaded successfully.

        """
        concurrent = False if concurrent is None else concurrent
        dry_run = False if dry_run is None else dry_run
        verbose = False if verbose is None else verbose
        ignore_existing = False if ignore_existing is None else ignore_existing
        checksum = False if checksum is None else checksum
        no_directory = False if no_directory is None else no_directory

        if verbose:
            sys.stdout.write('{0}:\n'.format(self.identifier))
            if self._json.get('is_dark') is True:
                sys.stdout.write(' skipping: item is dark.\n')
                log.warning('Not downloading item {0}, '
                            'item is dark'.format(self.identifier))
            elif self.metadata == {}:
                sys.stdout.write(' skipping: item does not exist.\n')
                log.warning('Not downloading item {0}, '
                            'item does not exist.'.format(self.identifier))

        if concurrent:
            try:
                from gevent import monkey
                monkey.patch_socket()
                from gevent.pool import Pool
                pool = Pool()
            except ImportError:
                raise ImportError(
                    """No module named gevent

                    Downloading files concurrently requires the gevent networking library.
                    gevent and all of its dependencies can be installed with pip:

                    \tpip install cython git+git://github.com/surfly/[email protected]#egg=gevent

                    """)

        files = self.iter_files()
        if source:
            files = self.get_files(source=source)
        if formats:
            files = self.get_files(formats=formats)
        if glob_pattern:
            files = self.get_files(glob_pattern=glob_pattern)

        if not files and verbose:
            sys.stdout.write(' no matching files found, nothing downloaded.\n')
        for f in files:
            fname = f.name.encode('utf-8')
            if no_directory:
                path = fname
            else:
                path = os.path.join(self.identifier, fname)
            if dry_run:
                sys.stdout.write(f.url + '\n')
                continue
            if concurrent:
                pool.spawn(f.download, path, verbose, ignore_existing, checksum, destdir)
            else:
                f.download(path, verbose, ignore_existing, checksum, destdir)
        if concurrent:
            pool.join()
        return True
Example #47
    def download(self, concurrent=None, source=None, formats=None, glob_pattern=None,
                 dry_run=None, verbose=None, ignore_existing=None, checksum=None,
                 destdir=None, no_directory=None):
        """Download the entire item into the current working directory.

        :type concurrent: bool
        :param concurrent: Download files concurrently if ``True``.

        :type source: str
        :param source: Only download files matching given source.

        :type formats: str
        :param formats: Only download files matching the given Formats.

        :type glob_pattern: str
        :param glob_pattern: Only download files matching the given glob
                             pattern

        :type ignore_existing: bool
        :param ignore_existing: Overwrite local files if they already
                                exist.

        :type checksum: bool
        :param checksum: Skip downloading file based on checksum.

        :type no_directory: bool
        :param no_directory: Download files to current working
                             directory rather than creating an item
                             directory.

        :rtype: bool
        :returns: True if files have been downloaded successfully.

        """
        concurrent = False if concurrent is None else concurrent
        dry_run = False if dry_run is None else dry_run
        verbose = False if verbose is None else verbose
        ignore_existing = False if ignore_existing is None else ignore_existing
        checksum = False if checksum is None else checksum
        no_directory = False if no_directory is None else no_directory

        if verbose:
            sys.stdout.write('{0}:\n'.format(self.identifier))
            if self._json.get('is_dark') is True:
                sys.stdout.write(' skipping: item is dark.\n')
                log.warning('Not downloading item {0}, '
                            'item is dark'.format(self.identifier))
            elif self.metadata == {}:
                sys.stdout.write(' skipping: item does not exist.\n')
                log.warning('Not downloading item {0}, '
                            'item does not exist.'.format(self.identifier))

        if concurrent:
            try:
                from gevent import monkey
                monkey.patch_socket()
                from gevent.pool import Pool
                pool = Pool()
            except ImportError:
                raise ImportError(
                    """No module named gevent

                    Downloading files concurrently requires the gevent networking library.
                    gevent and all of its dependencies can be installed with pip:

                    \tpip install cython git+git://github.com/surfly/[email protected]#egg=gevent

                    """)

        files = self.iter_files()
        if source:
            files = self.get_files(source=source)
        if formats:
            files = self.get_files(formats=formats)
        if glob_pattern:
            files = self.get_files(glob_pattern=glob_pattern)

        if not files and verbose:
            sys.stdout.write(' no matching files found, nothing downloaded.\n')
        for f in files:
            fname = f.name.encode('utf-8')
            if no_directory:
                path = fname
            else:
                path = os.path.join(self.identifier, fname)
            if dry_run:
                sys.stdout.write(f.url + '\n')
                continue
            if concurrent:
                pool.spawn(f.download, path, verbose, ignore_existing, checksum, destdir)
            else:
                f.download(path, verbose, ignore_existing, checksum, destdir)
        if concurrent:
            pool.join()
        return True
Example #48
def mesos_cpu_metrics_provider(
    marathon_service_config,
    system_paasta_config,
    marathon_tasks,
    mesos_tasks,
    log_utilization_data={},
    noop=False,
    **kwargs,
):
    """
    Gets the mean cpu utilization of a service across all of its tasks.

    :param marathon_service_config: the MarathonServiceConfig to get data from
    :param marathon_tasks: Marathon tasks to get data from
    :param mesos_tasks: Mesos tasks to get data from
    :param log_utilization_data: A dict used to transfer utilization data to autoscale_marathon_instance()

    :returns: the service's mean utilization, from 0 to 1
    """

    autoscaling_root = compose_autoscaling_zookeeper_root(
        service=marathon_service_config.service,
        instance=marathon_service_config.instance,
    )
    zk_last_time_path = '%s/cpu_last_time' % autoscaling_root
    zk_last_cpu_data = '%s/cpu_data' % autoscaling_root

    with ZookeeperPool() as zk:
        try:
            last_time = zk.get(zk_last_time_path)[0].decode('utf8')
            last_cpu_data = zk.get(zk_last_cpu_data)[0].decode('utf8')
            log_utilization_data[last_time] = last_cpu_data
            last_time = float(last_time)
            last_cpu_data = (datum for datum in last_cpu_data.split(',')
                             if datum)
        except NoNodeError:
            last_time = 0.0
            last_cpu_data = []

    monkey.patch_socket()
    jobs = [gevent.spawn(task.stats_callable) for task in mesos_tasks]
    gevent.joinall(jobs, timeout=60)
    mesos_tasks = dict(
        zip([task['id'] for task in mesos_tasks], [job.value for job in jobs]))

    current_time = int(datetime.now().strftime('%s'))
    time_delta = current_time - last_time

    mesos_cpu_data = {}
    for task_id, stats in mesos_tasks.items():
        if stats is not None:
            try:
                utime = float(stats['cpus_user_time_secs'])
                stime = float(stats['cpus_system_time_secs'])
                limit = float(stats['cpus_limit']) - .1
                mesos_cpu_data[task_id] = (stime + utime) / limit
            except KeyError:
                pass

    if not mesos_cpu_data:
        raise MetricsProviderNoDataError(
            "Couldn't get any cpu data from Mesos")

    cpu_data_csv = ','.join('%s:%s' % (cpu_seconds, task_id)
                            for task_id, cpu_seconds in mesos_cpu_data.items())
    log_utilization_data[str(current_time)] = cpu_data_csv

    if not noop:
        with ZookeeperPool() as zk:
            zk.ensure_path(zk_last_cpu_data)
            zk.ensure_path(zk_last_time_path)
            zk.set(zk_last_cpu_data, str(cpu_data_csv).encode('utf8'))
            zk.set(zk_last_time_path, str(current_time).encode('utf8'))

    utilization = {}
    for datum in last_cpu_data:
        last_cpu_seconds, task_id = datum.split(':')
        if task_id in mesos_cpu_data:
            cputime_delta = mesos_cpu_data[task_id] - float(last_cpu_seconds)

            if system_paasta_config.get_filter_bogus_mesos_cputime_enabled():
                # It is unlikely that the cputime consumed by a task is greater than the CPU limits
                # that we enforce. This is a bug in Mesos (tracked in PAASTA-13510)
                cpu_burst_allowance = (
                    marathon_service_config.get_cpu_quota() /
                    marathon_service_config.get_cpu_period())
                if cputime_delta > time_delta * cpu_burst_allowance:
                    log.warning(
                        'Ignoring potentially bogus cputime values for task {}'
                        .format(str(task_id)))
                    log.debug(
                        'Elapsed time: {}, Enforced CPU limit: {}, CPU time consumed: {}'
                        .format(
                            time_delta,
                            cpu_burst_allowance,
                            cputime_delta,
                        ), )
                    continue

            utilization[task_id] = cputime_delta / time_delta

    if not utilization:
        raise MetricsProviderNoDataError(
            """The mesos_cpu metrics provider doesn't have Zookeeper data for this service.
                                         This is expected for its first run."""
        )

    task_utilization = utilization.values()
    mean_utilization = mean(task_utilization)
    return mean_utilization
Example #49
        def do_test():
            if use_greenlets:
                try:
                    from gevent import Greenlet
                    from gevent import monkey

                    # Note we don't do patch_thread() or patch_all() - we're
                    # testing here that patch_thread() is unnecessary for
                    # the connection pool to work properly.
                    monkey.patch_socket()
                except ImportError:
                    outcome.value = SKIP
                    return
    
            cx = get_connection(
                use_greenlets=use_greenlets,
                auto_start_request=False
            )

            db = cx.pymongo_test
            db.test.remove(safe=True)
            db.test.insert({'_id': 1})

            history = []

            def find_fast():
                if use_request:
                    cx.start_request()

                history.append('find_fast start')

                # With the old connection._Pool, this would throw
                # AssertionError: "This event is already used by another
                # greenlet"
                results['find_fast_result'] = list(db.test.find())
                history.append('find_fast done')

                if use_request:
                    cx.end_request()

            def find_slow():
                if use_request:
                    cx.start_request()

                history.append('find_slow start')

                # Javascript function that pauses for half a second
                where = delay(0.5)
                results['find_slow_result'] = list(db.test.find(
                    {'$where': where}
                ))

                history.append('find_slow done')

                if use_request:
                    cx.end_request()

            if use_greenlets:
                gr0, gr1 = Greenlet(find_slow), Greenlet(find_fast)
                gr0.start()
                gr1.start_later(.1)
            else:
                gr0 = threading.Thread(target=find_slow)
                gr1 = threading.Thread(target=find_fast)
                gr0.start()
                time.sleep(0.1)
                gr1.start()

            gr0.join()
            gr1.join()

            self.assertEqual([{'_id': 1}], results['find_slow_result'])

            # Fails, since find_fast doesn't complete
            self.assertEqual([{'_id': 1}], results['find_fast_result'])

            self.assertEqual([
                'find_slow start',
                'find_fast start',
                'find_fast done',
                'find_slow done',
            ], history)

            outcome.value = SUCCESS
Example #50
import traceback

import socket
import stem
from stem import Signal
from stem.control import Controller
from stem.socket import ControlPort

from Plugin import PluginManager
from Config import config
from Debug import Debug

if config.tor != "disable":
    from gevent import monkey
    monkey.patch_time()
    monkey.patch_socket(dns=False)
    monkey.patch_thread()
    print "Stem Port Plugin: modules are patched."
else:
    print "Stem Port Plugin: Tor mode disabled. Module patching skipped."


class PatchedControlPort(ControlPort):
    def _make_socket(self):
        try:
            if "socket_noproxy" in dir(
                    socket):  # Socket proxy-patched, use non-proxy one
                control_socket = socket.socket_noproxy(socket.AF_INET,
                                                       socket.SOCK_STREAM)
            else:
                control_socket = socket.socket(socket.AF_INET,
Example #51
def on_starting(server):
    # use server hook to patch socket to allow worker reloading
    from gevent import monkey
    monkey.patch_socket()
Example #52
import gevent
from gevent import monkey

monkey.patch_socket()  # apply the patch to change the blocking behaviour
from socket import *


# create the socket
def server():
    s = socket()
    s.bind(("0.0.0.0", 8000))
    s.listen(10)
    while True:
        c, addr = s.accept()
        print("Connect from", addr)
        # handle(c)
        gevent.spawn(handle, c)


def handle(c):
    while True:
        data = c.recv(1024)
        if not data:
            break
        print(data.decode())
        c.send(b'Receive')
    c.close()


server()
Example #53
def start_client():
    import ctypes
    try:
        ctypes.cdll.avbin  # force avbin load
    except:
        pass

    import logging
    import os
    import argparse
    import utils.logging

    parser = argparse.ArgumentParser(prog=sys.argv[0])
    parser.add_argument('--no-update', action='store_true')
    parser.add_argument('--with-gl-errcheck', action='store_true')
    parser.add_argument('--freeplay', action='store_true')
    parser.add_argument('--fastjoin', type=int, default=None)
    parser.add_argument('--dump-gameobj', action='store_true')
    parser.add_argument('--log', default='INFO')
    parser.add_argument('--color-log', action='store_true')
    parser.add_argument('--zoom', type=float, default=1.0)
    parser.add_argument('--show-hidden-modes', action='store_true')

    options = parser.parse_args()

    import options as opmodule
    opmodule.options = options

    IS_PROTON = hasattr(os, 'uname') and os.uname()[:2] == ('Linux', 'Proton')

    import settings
    utils.logging.init(options.log.upper(), settings.SENTRY_DSN, settings.VERSION, IS_PROTON or options.color_log)

    if options.no_update:
        import autoupdate
        autoupdate.Autoupdate = autoupdate.DummyAutoupdate

    log = logging.getLogger('start_client')

    from gevent import monkey
    monkey.patch_socket()
    monkey.patch_os()
    monkey.patch_select()
    monkey.patch_ssl()

    from game import autoenv
    autoenv.init('Client')

    import pyglet

    pyglet.options['shadow_window'] = False

    if not options.with_gl_errcheck:
        pyglet.options['debug_gl'] = False

    from pyglet.gl import gl_info
    if gl_info.get_renderer() == 'GDI Generic':
        ctypes.windll.user32.MessageBoxW(
            0,
            u'你好像没有安装显卡驱动……?这样游戏是跑不起来的。快去安装!',
            u'需要显卡驱动',
            16,
        )
        sys.exit(0)

    if sys.platform.startswith('linux') and options.dump_gameobj:
        import atexit
        import game.base
        atexit.register(game.base.GameObjectMeta._dump_gameobject_hierarchy)
        atexit.register(game.base.EventHandler._dump_eh_dependency_graph)

    from client.ui.entry import start_ui

    # PIL compat
    from PIL import Image
    try:
        Image.frombytes
        Image.Image.tobytes
    except AttributeError:
        log.info('Patching PIL {from,to}bytes')
        Image.frombytes = Image.fromstring
        Image.Image.tobytes = Image.Image.tostring

    # ----------

    try:
        start_ui()
    except KeyboardInterrupt:
        import pyglet
        pyglet.app.exit()
        raise
    except:
        import pyglet
        pyglet.app.exit()

        if options.fastjoin:
            import pdb
            pdb.post_mortem()

        log.exception(u'UI线程崩溃,正在报告bug,请稍等下……')
        from utils.stats import stats
        stats({'event': 'crash'})

        raise
Example #54
import gevent
from gevent import monkey
monkey.patch_socket()  # apply the patch to change blocking behaviour; must run before importing socket
from socket import *

# create the socket
def server():
    s = socket()
    s.bind(("0.0.0.0",8888))
    s.listen(6)
    while 1:
        c,addr = s.accept()
        print("Connect from",addr)
        # handle(c)
        gevent.spawn(handle,c)



# handle a client
def handle(c):
    while 1:
        data = c.recv(1024)
        if not data:
            break
        print(data.decode())
        c.send(b"Receive")



server()
Example #55
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from gevent import monkey #the best thing to do is put this import in manage.py
monkey.patch_socket()

import logging
import requests
import tablib
import zlib
import re
import sys
from collections import namedtuple
import htmlentitydefs
from BeautifulSoup import BeautifulSoup, NavigableString
from redis import Redis
from urlparse import urlparse, parse_qs

import gevent
from gevent.queue import JoinableQueue, Queue

logging.basicConfig(level=logging.WARNING)

Project = namedtuple('Project', ['theme', 'activities', 'acronym',
                                 'start_date', 'end_date',
                                 'cost', 'funding',
                                 'status', 'contract_type',
                                 'coordinator', 'partners',
                                 'contact_person', 'reference', 'record'])

NUM_THEME_WORKER_THREADS = 4
NUM_PROJECT_WORKER_THREADS = 10
Example #56
import gevent
from gevent import monkey
# must be applied before the socket import, because the patch works by rewriting the default blocking behaviour
monkey.patch_socket()  # patch_all() can be used if patching should not be limited to socket
from socket import *


# create the socket
def Seerver():
    # a TCP socket by default
    server = socket()
    ADDRESS = ('0.0.0.0', 8888)
    server.bind(ADDRESS)
    server.listen(10)
    print("正在等待客户端连接")
    while True:
        client, addr = server.accept()
        print(addr, "连接过来了")
        # function that handles client requests
        # handle(client)
        # coroutines let multiple clients be served concurrently - better than a single thread,
        # though not as parallel as real threads; greenlets switch between clients whenever one blocks
        gevent.spawn(handle, client)


def handle(client):
    while True:
        data = client.recv(1024)  # blocks here, so greenlets switch back and forth
        print("客户端说 %s" % (data.decode()))
        if not data:
            break
        client.send('服务端收到'.encode())