Example #1
def run(app, host=None, port=None, debug=None, logger=logger):
    monkey.patch_all()

    address = get_address(host, port, app.config['SERVER_NAME'])

    if debug is not None:
        app.debug = debug

    if app.debug:
        #app.wsgi_app = DebuggedApplication(app.wsgi_app, evalex=True)
        server = WebSocketServer(address, app.wsgi_app, debug=debug)
        server._logger = logger

        logger.setLevel(logging.INFO)
        def run_server():
            server.serve_forever()
        if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
            server.logger.info(' * Running on http://%s:%d/' % address)
            server.logger.info(' * WebSocket enabled')
        run_with_reloader(run_server)
    else:
        server = WebSocketServer(address, app.wsgi_app, debug=debug)
        server._logger = logger
        server.serve_forever()
    return server
Example #2
def run(config):
    import atexit
    import sys

    # Reset sys.modules so that gevent can re-monkeypatch.
    # This is needed because distribute's pkg_resources imports urllib & co.
    # before we can properly monkey-patch them. ;(
    modules_to_reset = {'urllib', 'socket', '_ssl', 'ssl', 'select', 'thread',
                        'threading', 'time', 'os', 'subprocess'}
    for k in list(sys.modules.keys()):
        if k.startswith('dissonance.') or k in modules_to_reset:
            del sys.modules[k]

    from gevent.monkey import patch_all

    patch_all()

    from .dissonance import Dissonance
    import logging

    logging.basicConfig(**getattr(config, 'logging', {}))
    j = Dissonance(config)

    try:
        j.run()
        atexit.register(j.stop)
        j.join()

    except KeyboardInterrupt:
        print("Got ^C. Stopping!")
Example #3
def monkey_patch_gevent():
    """Monkey-patch gevent into core and zmq."""
    try:
        from gevent import monkey
    except ImportError:
        return
    monkey.patch_all()
    try:
        import zmq
        import zmq.eventloop
        import zmq.eventloop.ioloop
        import zmq.eventloop.zmqstream
        import zmq.green
        import zmq.green.eventloop
        import zmq.green.eventloop.ioloop
        import zmq.green.eventloop.zmqstream
    except ImportError:
        return
    TO_PATCH = ((zmq, zmq.green),
                (zmq.eventloop, zmq.green.eventloop),
                (zmq.eventloop.ioloop, zmq.green.eventloop.ioloop),
                (zmq.eventloop.zmqstream, zmq.green.eventloop.zmqstream))
    for (red, green) in TO_PATCH:
        for name in dir(red):
            redval = getattr(red, name)
            if name.startswith('__') or type(redval) is type(zmq):
                continue
            try:
                greenval = getattr(green, name)
            except AttributeError:
                continue
            if redval is not greenval:
                setattr(red, name, greenval)
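# Hedged usage sketch (not part of the original excerpt): the red/green swap
# above only helps if it runs before any sockets or IOLoops exist, so a typical
# caller invokes it at the very top of the process.
if __name__ == '__main__':
    monkey_patch_gevent()             # silently a no-op if gevent or zmq is missing
    import zmq
    context = zmq.Context.instance()  # zmq.Context now resolves to zmq.green.Context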
Example #4
    def handle(self, *args, **options):
        if not options['noinput']:
            confirm = raw_input("""
        ### %s Fast Reindex !!! ###
        You have requested to do a fluff index reset via fast track.
        This will update all your fluff indicators in place.

        Type 'yes' to continue, or 'no' to cancel: """ % self.pillow_class.__name__)

            if confirm != 'yes':
                print "\tReset cancelled."
                return

        from gevent.monkey import patch_all
        patch_all()

        self._bootstrap(options)
        start = datetime.utcnow()

        gevent.signal(signal.SIGQUIT, gevent.shutdown)
        queue = Queue(POOL_SIZE)
        workers = [gevent.spawn(worker, self, queue) for i in range(POOL_SIZE)]

        print "Starting fast tracked reindexing"
        for i, row in enumerate(self.full_couch_view_iter()):
            queue.put((row, i))

        gevent.joinall(workers)

        end = datetime.utcnow()
        print "done in %s seconds" % (end - start).seconds
Example #5
    def _open(self, scheme='mongodb://'):
        hostname, dbname, options = self._parse_uri(scheme=scheme)

        conf = self._prepare_client_options(options)
        conf['host'] = hostname

        env = _detect_environment()
        if env == 'gevent':
            from gevent import monkey
            monkey.patch_all()
        elif env == 'eventlet':
            from eventlet import monkey_patch
            monkey_patch()

        mongoconn = MongoClient(**conf)
        database = mongoconn[dbname]

        version = mongoconn.server_info()['version']
        if tuple(map(int, version.split('.')[:2])) < (1, 3):
            raise NotImplementedError(
                'Kombu requires MongoDB version 1.3+ (server is {0})'.format(
                    version))

        self._create_broadcast(database, options)

        self._client = database
Example #6
def server(host=RESTAPI_DEFAULT_ADDRESS, port=RESTAPI_DEFAULT_PORT, adapter=RESTAPI_DEFAULT_ADAPTER):
    """
    REST-JSON API server
    """
    DataStore.admin_id = hexencode(os.urandom(16))
    Database.filepath = tempfile.mkstemp(prefix="sqlmapipc-", text=False)[1]
    # Write the admin ID to a well-known local file; this is safe because the API is only reachable locally
    file_object = open('/www/xseclab.com/termite/.sqlmapadminid', 'w')
    file_object.write(DataStore.admin_id)
    file_object.close()

    logger.info("Running REST-JSON API server at '%s:%d'.." % (host, port))
    logger.info("Admin ID: %s" % DataStore.admin_id)
    logger.debug("IPC database: %s" % Database.filepath)

    # Initialize IPC database
    DataStore.current_db = Database()
    DataStore.current_db.connect()
    DataStore.current_db.init()

    # Run RESTful API
    try:
        if adapter == "gevent":
            from gevent import monkey
            monkey.patch_all()
        elif adapter == "eventlet":
            import eventlet
            eventlet.monkey_patch()
        logger.debug("Using adapter '%s' to run bottle" % adapter)
        run(host=host, port=port, quiet=True, debug=False, server=adapter)
    except socket.error, ex:
        if "already in use" in getSafeExString(ex):
            logger.error("Address already in use ('%s:%s')" % (host, port))
        else:
            raise
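# Hedged usage sketch (the defaults come from the RESTAPI_DEFAULT_* constants in
# the signature above; the values shown here are illustrative):
if __name__ == '__main__':
    server(host='127.0.0.1', port=8775, adapter='gevent')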
Example #7
def search_index_service(db):
    # Based on the syncback_service fixture in tests/util/base.
    monkey.patch_all(aggressive=False)
    from inbox.transactions.search import SearchIndexService
    s = SearchIndexService(poll_interval=0.1)
    s.start()
    yield s
Example #8
    def setup(cls):
        from gevent import monkey
        monkey.noisy = False
        monkey.patch_all()

        # monkey patch sendfile to make it non-blocking
        patch_sendfile()
Example #9
def run_server(instance_dir, address, port, access_log=False):

    # workaround on osx, disable kqueue
    if sys.platform == "darwin":
        os.environ['EVENT_NOKQUEUE'] = "1"

    sys.path.insert(0, instance_dir)

    os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'

    # This application object is used by any WSGI server configured to use this
    # file. This includes Django's development server, if the WSGI_APPLICATION
    # setting points here.
    from django.core.wsgi import get_wsgi_application
    application = get_wsgi_application()

    from gevent import monkey
    monkey.patch_all(dns=False)
    from gevent.pywsgi import WSGIServer

    if access_log:
        logfile = 'default'
    else:
        logfile = file(os.devnull, 'a+')

    server = WSGIServer((address, port), application, log=logfile)
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        server.stop()
Example #10
def basic(topic="topic", channel="channel", count=1e6, size=10, gevent=False, max_in_flight=2500, profile=False):
    """Basic benchmark"""
    if gevent:
        from gevent import monkey

        monkey.patch_all()

    # Check the types of the arguments
    count = int(count)
    size = int(size)
    max_in_flight = int(max_in_flight)

    from nsq.http import nsqd
    from nsq.reader import Reader

    print "Publishing messages..."
    for batch in grouper(messages(count, size), 1000):
        nsqd.Client("http://localhost:4151").mpub(topic, batch)

    print "Consuming messages"
    client = Reader(topic, channel, nsqd_tcp_addresses=["localhost:4150"], max_in_flight=max_in_flight)
    with closing(client):
        start = -time.time()
        if profile:
            with profiler():
                for message in islice(client, count):
                    message.fin()
        else:
            for message in islice(client, count):
                message.fin()
        start += time.time()
    print "Finished %i messages in %fs (%5.2f messages / second)" % (count, start, count / start)
Example #11
    def _open(self, scheme='mongodb://'):
        hostname, dbname, options = self._parse_uri(scheme=scheme)

        conf = self._prepare_client_options(options)
        conf['host'] = hostname

        env = _detect_environment()
        if env == 'gevent':
            from gevent import monkey
            monkey.patch_all()
        elif env == 'eventlet':
            from eventlet import monkey_patch
            monkey_patch()

        mongoconn = MongoClient(**conf)
        database = mongoconn[dbname]

        version_str = mongoconn.server_info()['version']
        version = tuple(map(int, version_str.split('.')))

        if version < (1, 3):
            raise VersionMismatch(E_SERVER_VERSION.format(version_str))
        elif self.ttl and version < (2, 2):
            raise VersionMismatch(E_NO_TTL_INDEXES.format(version_str))

        return database
Example #12
def gevent_run(app, port=5000, log=None, error_log=None, address='',
               monkey_patch=True, start=True, **kwargs):  # pragma: no cover
    """Run your app in gevent.wsgi.WSGIServer

    :param app: wsgi application, ex. Microservice instance
    :param port: int, listen port, default 5000
    :param address: str, listen address, default: ""
    :param log: logger instance, default app.logger
    :param error_log: logger instance, default app.logger
    :param monkey_patch: boolean, use gevent.monkey.patch_all() for patching standard modules, default: True
    :param start: boolean, if True, server will be start (server.serve_forever())
    :param kwargs: other params for WSGIServer(**kwargs)
    :return: server
    """
    if log is None:
        log = app.logger
    if error_log is None:
        error_log = app.logger
    if monkey_patch:
        from gevent import monkey

        monkey.patch_all()

    from gevent.wsgi import WSGIServer
    http_server = WSGIServer((address, port), app, log=log, error_log=error_log,
                             **kwargs)
    if start:
        http_server.serve_forever()
    return http_server
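# Hedged usage sketch (assumes a Flask-style WSGI app exposing a `.logger`
# attribute, as the docstring above implies; port/address are illustrative):
if __name__ == '__main__':
    from flask import Flask
    app = Flask(__name__)
    # Patches the stdlib, binds on all interfaces and blocks in serve_forever().
    gevent_run(app, port=8000, address='0.0.0.0', monkey_patch=True, start=True)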
Example #13
def main():
    parser = get_parser()
    options, _ = parser.parse_args()

    if options.GEVENT:
        from seismograph.utils import pyv
        pyv.check_gevent_supported()

        from gevent.monkey import patch_all
        patch_all(thread=False)

    from seismograph.ext.mocker import SERVER_TYPES

    try:
        mock_server_class = SERVER_TYPES[options.SERVER_TYPE]
    except KeyError:
        raise ValueError('Incorrect server type')

    server = mock_server_class(
        options.MOCKS_DIR,
        host=options.HOST,
        port=options.PORT,
        debug=options.NO_DEBUG,
        gevent=options.GEVENT,
        threading=options.THREADING,
        multiprocessing=options.MULTIPROCESSING,
    )
    server.serve_forever()
Example #14
def patch_all(gevent=True, pdb=True, **kwargs):
    if gevent:
        patch_gevent()
    if pdb:
        patch_pdb()
    from gevent.monkey import patch_all
    patch_all(**kwargs)
Example #15
def taskIncStats(stats):
    print "taskIncStats(stats)"
    GEVENT_MONKEY_PATCH = settings.TILEJET_GEVENT_MONKEY_PATCH
    #=======#
    now = datetime.datetime.now()
    if GEVENT_MONKEY_PATCH:
        # Import Gevent and monkey patch
        from gevent import monkey
        monkey.patch_all()
    # Update MongoDB
    from pymongo import MongoClient
    client = None
    db = None
    try:
        client = MongoClient(settings.TILEJET_DBHOST, settings.TILEJET_DBPORT)
        db = client[settings.TILEJET_DBNAME]
    except:
        client = None
        db = None
        errorline = "Error: Could not connet to stats database from taskIncStats. Most likely issue with connection pool"
        error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt"
        with open(error_file,'a') as f:
            f.write(errorline+"\n")

    # Increment Statistics
    if client and db:
        for stat in stats:
            try:
                collection = db[stat['collection']]
                collection.update(stat['attributes'], {'$set': stat['attributes'], '$inc': {'value': 1}}, upsert=True, w=0)
            except:
                errorline = "Error: Could not connect to upsert stats from taskIncStats.  Most likely issue with sockets"
                error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt"
                with open(error_file,'a') as f:
                    f.write(errorline+"\n")
Example #16
 def gevent(app, address, **options):
     from gevent import monkey
     monkey.patch_all()
     from gevent import pywsgi
     from gevent.pool import Pool
     pywsgi.WSGIServer(address, app, spawn='workers' in options and Pool(
         int(options['workers'])) or 'default').serve_forever()
Example #17
def taskIncStats(stats):
    now = datetime.datetime.now()
    # Import Gevent and monkey patch
    from gevent import monkey
    monkey.patch_all()
    # Update MongoDB
    from pymongo import MongoClient
    client = None
    db = None
    try:
        #client = MongoClient('localhost', 27017)
        client = MongoClient('/tmp/mongodb-27017.sock')
        db = client.ittc
    except:
        client = None
        db = None
        errorline = "Error: Could not connet to stats database from taskIncStats. Most likely issue with connection pool"
        error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt"
        with open(error_file,'a') as f:
            f.write(errorline+"\n")

    # Increment Statistics
    if client and db:
        for stat in stats:
            try:
                collection = db[stat['collection']]
                collection.update(stat['attributes'], {'$set': stat['attributes'], '$inc': {'value': 1}}, upsert=True, w=0)
            except:
                errorline = "Error: Could not connect to upsert stats from taskIncStats.  Most likely issue with sockets"
                error_file = settings.LOG_ERRORS_ROOT+os.sep+"requests_tiles_"+now.strftime('%Y-%m-%d')+"_errors.txt"
                with open(error_file,'a') as f:
                    f.write(errorline+"\n")
Example #18
def run_app(app_path, **kwargs):
    """
    Command for run application.

    :usage:
        noseapp-manage run import.path.to:get_app_function

    :app_path: path for importing application
    :kwargs: kwargs of create application function
    """
    for argv in sys.argv:
        if 'gevent' in argv:
            if pyv.IS_PYTHON_3:
                raise pyv.UnSupportedError('gevent lib unsupported with python 3')

            try:
                from gevent.monkey import patch_all
                patch_all()
            except ImportError as e:
                raise ImportError(
                    '{}. {}.'.format(e.message, 'Please install gevent'),
                )

            break

    app = funcs.get_create_app_func(app_path)(**kwargs)
    app.run()
Example #19
def run(app, host, port):
    print_banner()

    monkey.patch_all()

    server = setup_server(app, host, port)
    server.serve_forever()
Example #20
def server(args):
    # import gevent and monkey patch here so only the gevent server is
    # affected.
    from gevent import monkey
    monkey.patch_all()
    config.OPTIONS['debug'] = args.debug
    bottle.run(app=webapp.app(), server='gevent', debug=args.debug)
Example #21
def main():
    from gevent import monkey; monkey.patch_all()
    from .orchestra import monkey; monkey.patch_all()

    import logging

    log = logging.getLogger(__name__)

    ctx = parse_args()

    loglevel = logging.INFO
    if ctx.verbose:
        loglevel = logging.DEBUG

    logging.basicConfig(
        level=loglevel,
        )

    from teuthology.misc import read_config
    read_config(ctx)

    log.info('\n  '.join(['targets:', ] + yaml.safe_dump(ctx.config['targets'], default_flow_style=False).splitlines()))

    if ctx.owner is None:
        from teuthology.misc import get_user
        ctx.owner = get_user()

    nuke(ctx, log, ctx.unlock, ctx.synch_clocks, ctx.reboot_all)
Example #22
def main(argv):
    """
    Application entry point.

    If an error occurs in the application, it sleeps for config.SLEEP_ON_FAIL seconds.

    :param argv: command-line arguments.
    :type argv: list
    """
    args = utils.parse_cmd_args(argv[1:])
    config = utils.prepare(args)

    dictConfig(config.LOGGING)
    current_thread().name = 'pusher.main'
    install_signal_handlers()
    patch_all()

    while run_application:
        try:
            main_loop(config)
        except Exception as exc:
            logger.error(
                'Error in main loop. Go to sleep on {} second(s).'.format(config.SLEEP_ON_FAIL)
            )
            logger.exception(exc)

            sleep(config.SLEEP_ON_FAIL)
    else:
        logger.info('Stop application loop in main.')

    return exit_code
Example #23
def main():
    monkey.patch_all()

    parser = argparse.ArgumentParser()
    parser.add_argument('-p', '--project_list', default=None, required=False)
    parser.add_argument('-n', '--num_tasks', default=1, required=False)
    parser.add_argument('-k', '--asana_api_key', default=ASANA_API_KEY)
    parser.add_argument('-w', '--workspace', default=None, required=False)
    args = parser.parse_args()

    asana_api = asana.AsanaAPI(debug=True)

    # collect tasks from each project
    universal_result_list = list()
    project_list = args.project_list
    if project_list is None:
        project_list = [project['id'] for project in asana_api.list_projects(include_archived=False)]

    pool = Pool(MAX_GREENLETS)
    [pool.spawn(get_asana_project, asana_api, project_id, num_projects) for project_id in project_list]
    pool.join()

    # select task, url at random from each project
    # remove the parent map rows first!
    selected_tasks = []    
Example #24
def main():
    formatter = logging.Formatter(
        '[%(asctime)s] %(name)s<%(levelname)s> %(message)s')
    handler = logging.StreamHandler()
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.INFO)

    args = get_parser().parse_args()

    if args.subworker and args.worker != 'process':
        logger.error('using subworkers requires worker set to be "process"')
        return

    if args.worker == 'thread':
        from concurrent.futures import ThreadPoolExecutor
        e = ThreadPoolExecutor(args.concurrency)
        for _ in range(args.concurrency):
            e.submit(run_worker, args.odq, args.queue, args.worker)

    elif args.worker == 'process':
        from concurrent.futures import ProcessPoolExecutor
        e = ProcessPoolExecutor(args.concurrency)
        for _ in range(args.concurrency):
            e.submit(run_worker, args.odq, args.queue, args.worker,
                     args.subworker, args.subconcurrency)

    elif args.worker == 'gevent':
        from gevent import monkey
        monkey.patch_all()
        from gevent.pool import Pool
        pool = Pool(args.concurrency)
        for _ in range(args.concurrency):
            pool.spawn(run_worker, args.odq, args.queue, args.worker)
        pool.join()
Example #25
 def target():
     tcurrent = threading.current_thread()
     monkey.patch_all()
     tcurrent2 = threading.current_thread()
     self.assertIsNot(tcurrent, current)
     # We get a dummy thread now
     self.assertIsNot(tcurrent, tcurrent2)
Example #26
def test():
    scheduler = BaseScheduler()
    def add_new_task(task):
        scheduler.add_new_task(task)

    import sys
    if 'threading' in sys.modules:
        del sys.modules['threading']
    from gevent import monkey
    monkey.patch_all()
    from gevent.pool import Pool
    gp = Pool(100)
    import time
    st = time.time()
    tasks = []
    for i in xrange(1, 1000):
        t = {
            'uuid': i
            }
        tasks.append(t)
    gp.spawn(add_new_task, tasks)
    gp.join()

    end = time.time()
    tmp = list(iter(scheduler.new_tasks_generator()))
    cnt = len(tmp)
    while tmp:
    #for i in xrange(1000):
        tmp = list(iter(scheduler.new_tasks_generator()))
        cnt += len(tmp)

    print [ i in scheduler.rfilter for i in xrange(1, 1000) ]
Example #27
 def test_3(self):
     '''Submit to job queue with gevent.'''
     
     monkey.patch_all()
     gm_client = gearman.GearmanClient([self.gm_server])
     for _ in range(self.jobs):            
         gm_client.submit_job("with_gevent",self.data,background=True)
Example #28
def commit_to_cache(cache_location, cache_params, key, obj, GEVENT_MONKEY_PATCH=False):
    if GEVENT_MONKEY_PATCH:
        # Import Gevent and monkey patch
        try:
            from gevent import monkey
            monkey.patch_all()
        except:
            print "gevent monkey patch failed"

    # Import Django Cache (mozilla/django-memcached-pool)
    #from django.core.cache import cache, caches, get_cache
    #from django.core.cache import caches
    # Get Tile Cache
    cache = None
    success = False
    try:
        from umemcache import MemcachedError
        from memcachepool.cache import UMemcacheCache
        cache = UMemcacheCache(cache_location, cache_params)
        #cache = caches['tiles']
    except:
        cache = None

    if cache:
        try:
            cache.set(key, obj)
            success = True
        except MemcachedError, e:
            print e
            success = False
Example #29
def get_from_cache(cache_location, cache_params, name, key, GEVENT_MONKEY_PATCH=False):
    if GEVENT_MONKEY_PATCH:
        # Import Gevent and monkey patch
        try:
            from gevent import monkey
            monkey.patch_all()
        except:
            print "gevent monkey patch failed"
    # Import Django Cache (mozilla/django-memcached-pool)
    #from django.core.cache import cache, caches, get_cache
    #from django.core.cache import caches
    # Get Tile Cache
    cache = None
    item = None
    try:
        from umemcache import MemcachedError
        from memcachepool.cache import UMemcacheCache
        cache = UMemcacheCache(cache_location, cache_params)
        #cache = caches['tiles']
    except:
        cache = None

    if cache:
        try:
            item = cache.get(key)
        except socket_error, e:
            print e
            item = None
        except MemcachedError, e:
            print e
            item = None
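# Hedged usage sketch for the two cache helpers above (Examples #28 and #29).
# The location, params and keys are purely illustrative, and the excerpt of
# get_from_cache() is truncated before the fetched item is returned.
cache_location = '127.0.0.1:11211'
cache_params = {'MAX_POOL_SIZE': 10}
tile_bytes = b'<png tile bytes>'

commit_to_cache(cache_location, cache_params, 'tile/4/8/5', tile_bytes,
                GEVENT_MONKEY_PATCH=True)
tile = get_from_cache(cache_location, cache_params, 'tiles', 'tile/4/8/5',
                      GEVENT_MONKEY_PATCH=True)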
Example #30
File: asis.py Project: seomoz/asis
    def greenlet(self):
        '''Run an asis server in a greenlet.'''
        # Ensure that monkey-patching has happened before running the server.
        # We avoid aggressively monkey-patching at the top of the file since
        # this class may be used from many contexts, including potentially
        # without the advent of `gevent`.
        from gevent import monkey
        monkey.patch_all()

        import gevent
        spawned = gevent.Greenlet.spawn(self.run)
        try:
            # Wait until something is listening on the specified port. Two
            # outcomes are possible -- an exception happens and the greenlet
            # terminates, or it starts the server and is listening on the
            # provided port.
            while spawned:
                if self.check_ready():
                    break

            # If the greenlet had an exception, re-raise it in this context
            if not spawned:
                raise spawned.exception

            yield spawned
        finally:
            spawned.kill(KeyboardInterrupt, block=True)
            spawned.join()
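# Hedged usage sketch (assumes the method above belongs to the asis `Server`
# class and is exposed as a context manager, e.g. decorated with
# contextlib.contextmanager in the full source; constructor arguments are
# hypothetical):
server = Server('tests/fixtures', port=8080)
with server.greenlet():
    # The server is listening on port 8080; exercise it from the same process.
    ...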
Example #31
This module contains an asynchronous replica of ``requests.api``, powered
by gevent. All API methods return a ``Request`` instance (as opposed to
``Response``). A list of requests can be sent with ``map()``.
"""
from functools import partial
import traceback
try:
    import gevent
    from gevent import monkey as curious_george
    from gevent.pool import Pool
except ImportError:
    raise RuntimeError('Gevent is required for grequests.')

# Monkey-patch.
curious_george.patch_all(thread=False, select=False)

from requests import Session

__all__ = ('map', 'imap', 'get', 'options', 'head', 'post', 'put', 'patch',
           'delete', 'request')


class AsyncRequest(object):
    """ Asynchronous request.

    Accept same parameters as ``Session.request`` and some additional:

    :param session: Session which will do request
    :param callback: Callback called on response.
                     Same as passing ``hooks={'response': callback}``
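# Hedged usage sketch of the module-level API described in the docstring above
# (the excerpt is truncated; standard grequests calls, illustrative URLs):
import grequests

urls = ['http://httpbin.org/get', 'http://httpbin.org/delay/1']
reqs = (grequests.get(u) for u in urls)    # AsyncRequest objects, not yet sent
responses = grequests.map(reqs, size=10)   # send concurrently, at most 10 at once
print([r.status_code for r in responses if r is not None])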
Example #32
from __future__ import unicode_literals
import re
import os
import sys
import json
import glob
import random
import gevent
import logging
from gevent.pool import Pool
from gevent.monkey import patch_all
from redis import StrictRedis
from importlib import import_module
from slackclient import SlackClient
from settings import APPS, BOT_NAME, ICON_URL, SLACK_TOKEN, REDIS_URL
patch_all()

pool = Pool(20)

CMD_PREFIX = ''
logger = logging.getLogger()


class RedisBrain(object):
    def __init__(self):
        try:
            self.redis = StrictRedis(host=REDIS_URL)
        except Exception as e:
            logger.error(e)
            self.redis = None
Example #33
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Download web content via the urllib/urllib2 modules
import urllib, urllib2, gevent
# Import the regular expression and time modules
import re, time
from gevent import monkey

monkey.patch_all()


def geturllist(url):
    url_list = []
    print url
    s = urllib2.urlopen(url)
    text = s.read()
    # Regex match to pick out the images
    html = re.search(r'<ol.*</ol>', text, re.S)
    urls = re.finditer(r'<p><img src="(.+?)jpg" /></p>', html.group(), re.I)
    for i in urls:
        url = i.group(1).strip() + str("jpg")
        url_list.append(url)
    return url_list


def download(down_url):
    name = str(time.time())[:-3] + "_" + re.sub('.+?/', '', down_url)
    print name
    urllib.urlretrieve(down_url, "D:\\TEMP\\" + name)
Example #34
    def __init__(self,
                 oag,
                 initurl=None,
                 rpc_enabled=True,
                 rpc_acl_policy=ACL.LOCAL_ALL,
                 rpc_async=True,
                 rpc_dbupdate_listen=False,
                 heartbeat_enabled=True):

        ### Store reference to OAG
        self._oag = weakref.ref(oag)

        # Greenlets spawned by RPC
        self._glets = []

        ### Spin up rpc infrastructure

        # Imports
        from gevent import spawn, monkey
        from gevent.lock import BoundedSemaphore

        # Is RPC initialization complete?
        self._rpc_init_done = False

        # A very basic question...
        self._rpc_enabled = rpc_enabled

        # Who's allowed to access this node?
        self._rpc_acl_policy = rpc_acl_policy

        # Serialize access to RPC
        self._rpcsem = BoundedSemaphore(1)

        # Routes all incoming RPC requests (dead to start with)
        self._rpcrtr = None

        # Registrations received from other OAGs
        self._rpcreqs = {}

        # Async
        self._rpc_async = rpc_async

        # Holding spot for RPC discoverability - default off
        self._rpc_discovery = None

        # Listen for dbupdates elsewhere
        self._rpc_dbupdate_listen = rpc_dbupdate_listen

        # Should you heartbeat?
        self._rpc_heartbeat = heartbeat_enabled

        # Invalidation cache
        self._rpc_transaction = RpcTransaction(self)

        # Stoplist of OAG streams that shouldn't be exposed over RPC
        self._rpc_stop_list = [
            'cache',
            'db',
            'discoverable',
            'logger',
            'propmgr',
            'rdf',
            'rpc',
        ] + [attr for attr in dir(self._oag) if attr[0] == '_']

        ### Set up OAG proxying infrastructure

        # Are we proxying for another OAG?
        self._proxy_mode = False

        # OAG URL this one is proxying
        self._proxy_url = initurl

        # List of props we are making RPC calls for
        self._proxy_oags = []

        ### Carry out initialization

        # Is this OAG RPC enabled? If no, don't proceed
        if not self._rpc_enabled:
            return

        # RPC routing
        if not self._rpc_init_done:

            monkey.patch_all()

            with self._rpcsem:
                self._rpcrtr = OAGRPC_RTR_Requests(self._oag)
                self._rpcrtr.start()
                if self._rpc_async:
                    # Force execution of newly spawned greenlets
                    g = spawn(self.start)
                    g.name = "%s/%s" % (str(self._rpcrtr.id), self._oag)
                    self._glets.append(g)
                    gevent.sleep(0)

            # Avoid double RPC initialization
            self._rpc_init_done = True

        # Proxying
        if self._proxy_url:
            self._proxy_mode = True
Example #35
#@author:九世
#@file:scan.py
#@time: 2019/9/23

from gevent import monkey;monkey.patch_all()
from multiprocessing import Process
import requests
import sys
import gevent
import os

class Gz(object):
    def __init__(self):
        self.guanjianzi='haq5201314'
        self.payload='c3lzdGVtKCJlY2hvIGhhcTUyMDEzMTQiKTs='
        self.headers={'user-agent':'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3314.0 Safari/537.36 SE 2.X MetaSr 1.0',
                      'Accept-Encoding':'gzip,deflate',
                      'Accept-Charset':'c3lzdGVtKCJlY2hvIGhhcTUyMDEzMTQiKTs='}

        self.calc=0
        self.djcs=[]
        self.xcs=[]
    def scan_query(self,url):
        try:
            rqt=requests.get(url=url,headers=self.headers)
            if self.guanjianzi in rqt.text:
                print('[ok] PHPStudy backdoor vulnerability present, url: {}'.format(rqt.url))
            else:
                print('[no] no backdoor present, url: {}'.format(rqt.url))
        except Exception as r:
            print('[Error] {}'.format(r))
Example #36
from gevent import monkey  # isort:skip
monkey.patch_all()  # isort:skip

from time import sleep  # noqa: E402

from simple_amqp import AmqpMsg, AmqpParameters  # noqa: E402
from simple_amqp.gevent import GeventAmqpConnection  # noqa: E402

FIRST_COUNT = 0
SECOND_COUNT = 0


def consumer(msg: AmqpMsg):
    global FIRST_COUNT
    global SECOND_COUNT
    payload = msg.payload
    payload = payload.decode()
    if payload == 'first':
        FIRST_COUNT += 1
    if payload == 'second':
        SECOND_COUNT += 1

    print('msg received: {}'.format(payload))

    # acknowledge that the message was received correctly
    return True


def main():
    conn = GeventAmqpConnection(AmqpParameters())
    channel = conn.channel()
Example #37
# Make external libs work with gevent, but still enable real threading
from gevent import monkey  # isort:skip

monkey.patch_all(thread=False)  # noqa: E702 isort:skip
# Make postgresql usable with gevent
import psycogreen.gevent  # isort:skip

psycogreen.gevent.patch_psycopg()  # noqa: E702 isort:skip
import json
import logging
import logging.config
import os

import click
import toml
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler

from relay.relay import TrustlinesRelay
from relay.utils import get_version

from .api.app import ApiApp

logger = logging.getLogger("trustlines")


def patch_warnings_module():
    """patch the warnings modules simplefilter function

    the web3 module prints excessive deprecation warnings. They call
Example #38
from gevent import monkey  # isort:skip # noqa

monkey.patch_all(subprocess=False, thread=False)  # isort:skip # noqa

import os.path
import sys

import click
import structlog
from request_collector.server import RequestCollector

from monitoring_service.database import SharedDatabase
from raiden.utils.cli import NetworkChoiceType
from raiden_libs.cli import common_options, setup_sentry

log = structlog.get_logger(__name__)


@click.command()
@click.option(
    "--chain-id",
    type=NetworkChoiceType(
        ["mainnet", "ropsten", "rinkeby", "goerli", "kovan", "<NETWORK_ID>"]),
    required=True,
    show_default=True,
    help=(
        "Specify the chain name/id of the Ethereum network to run Raiden on.\n"
        "Available networks:\n"
        '"mainnet" - network id: 1\n'
        '"ropsten" - network id: 3\n'
        '"rinkeby" - network id: 4\n'
Example #39
from gevent import monkey

monkey.patch_all(thread=False)

import psycogreen.gevent

psycogreen.gevent.patch_psycopg()

import pytest
from flask.testing import FlaskClient

from app import create_app
from .utils import JSONResponse


@pytest.fixture(scope='module')
def flask_app():
    app = create_app(environment='testing')
    from app.database import db

    with app.app_context():
        db.create_all()
        yield app
        db.session.close_all()
        db.drop_all()


@pytest.fixture(scope='module')
def client(flask_app):
    app = flask_app
    ctx = flask_app.test_request_context()
Example #40
# Included modules
import os
import sys
import time
import logging

# Third party modules
import gevent
from gevent import monkey
if "patch_subprocess" in dir(monkey):
    monkey.patch_all(thread=False, subprocess=False)
else:
    monkey.patch_all(thread=False)
# Not thread: pyfilesystem and systray icon, Not subprocess: Gevent 1.1+

update_after_shutdown = False  # If set to True, update and restart ZeroNet after the main loop ends

# Load config
from Config import config

config.parse(silent=True)  # Plugins need to access the configuration
if not config.arguments:  # Config parse failed, show the help screen and exit
    config.parse()

# Create necessary files and dirs
if not os.path.isdir(config.log_dir):
    os.mkdir(config.log_dir)
if not os.path.isdir(config.data_dir):
    os.mkdir(config.data_dir)
if not os.path.isfile("%s/sites.json" % config.data_dir):
    open("%s/sites.json" % config.data_dir, "w").write("{}")
Example #41
#!/bin/python
import configparser, time, json, os, socket
import paho.mqtt.client as mqtt
from bottle import route, request, run, Jinja2Template, jinja2_view, static_file, GeventServer, response, get, HTTPResponse
from gevent import monkey, sleep; monkey.patch_all()

# Get current time in seconds
sec_time = lambda: int(time.time())

config = configparser.ConfigParser()
server_ip = "0.0.0.0"
server_port = 8080
mqtt_broker = server_ip
broker_port = 1883

modules = []
rooms = []

def parse_bool(s):
	if str(s).lower() in ('yes', 'true', 't', 'y', '1'):
		return True
	elif str(s).lower() in ('no', 'false', 'f', 'n', '0'):
		return False

def on_mqtt_message(client, userdata, message):
	for module in modules:
		light_topic = "intarnetto/module_" + module.code + "/status"
		mpd_topic = "mpd/module_" + module.code + "/status"
		if message.topic == light_topic:
			m_message = str(message.payload.decode("utf-8"))
			module.set_status(parse_bool(m_message))
Example #42
#-*- coding: utf-8 -*-
import gevent
from gevent import monkey
monkey.patch_all(thread=True)
import os
import sys
from os.path import abspath, dirname
sys.path.insert(0, dirname(dirname(abspath(__file__))))
import datetime
import matplotlib
matplotlib.use('Agg')
import const as ct
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from backlooking.cdoc import CDoc
from matplotlib import style
from climit import CLimit
from cindex import CIndex
from cmysql import CMySQL
from datetime import datetime
from rstock import RIndexStock
from base.clog import getLogger
from datamanager.margin import Margin
from rindustry import RIndexIndustryInfo
from industry_info import IndustryInfo
from datamanager.emotion import Emotion
from datamanager.sexchange import StockExchange
from common import create_redis_obj, get_chinese_font, get_tushare_client, get_day_nday_ago

Example #43
# fmt: off
from gevent.monkey import patch_all; patch_all()
# fmt: on
import base64
import hashlib
import json
import os
import traceback
from itertools import chain
from urllib.parse import parse_qsl

import redis
import requests
from pywb.apps.frontendapp import FrontEndApp
from pywb.apps.wbrequestresponse import WbResponse
from pywb.manager.manager import CollectionsManager
from pywb.rewrite.templateview import BaseInsertView
from warcio.timeutils import timestamp_now, timestamp_to_iso_date
from werkzeug.routing import Rule

EMPTY_LIST = []
EMPTY_DICT = {}
SKIPPED_NODES = {'script', 'style'}
TITLE = 'title'
TEXT = '#text'


# ============================================================================
def extract_text(node, metadata=None):
    node_name = node.get('nodeName', '').lower()
    if node_name not in SKIPPED_NODES:
Example #44
    def run(self, app, host=None, port=None, **kwargs):
        """Run the SocketIO web server.

        :param app: The Flask application instance.
        :param host: The hostname or IP address for the server to listen on.
                     Defaults to 127.0.0.1.
        :param port: The port number for the server to listen on. Defaults to
                     5000.
        :param debug: ``True`` to start the server in debug mode, ``False`` to
                      start in normal mode.
        :param use_reloader: ``True`` to enable the Flask reloader, ``False``
                             to disable it.
        :param extra_files: A list of additional files that the Flask
                            reloader should watch. Defaults to ``None``
        :param log_output: If ``True``, the server logs all incoming
                           connections. If ``False`` logging is disabled.
                           Defaults to ``True`` in debug mode, ``False``
                           in normal mode. Unused when the threading async
                           mode is used.
        :param kwargs: Additional web server options. The web server options
                       are specific to the server used in each of the supported
                       async modes. Note that options provided here will
                       not be seen when using an external web server such
                       as gunicorn, since this method is not called in that
                       case.
        """
        if host is None:
            host = '127.0.0.1'
        if port is None:
            server_name = app.config['SERVER_NAME']
            if server_name and ':' in server_name:
                port = int(server_name.rsplit(':', 1)[1])
            else:
                port = 5000

        debug = kwargs.pop('debug', app.debug)
        log_output = kwargs.pop('log_output', debug)
        use_reloader = kwargs.pop('use_reloader', debug)
        extra_files = kwargs.pop('extra_files', None)

        app.debug = debug
        if app.debug and self.server.eio.async_mode != 'threading':
            # put the debug middleware between the SocketIO middleware
            # and the Flask application instance
            #
            #    mw1   mw2   mw3   Flask app
            #     o ---- o ---- o ---- o
            #    /
            #   o Flask-SocketIO
            #    \  middleware
            #     o
            #  Flask-SocketIO WebSocket handler
            #
            # BECOMES
            #
            #  dbg-mw   mw1   mw2   mw3   Flask app
            #     o ---- o ---- o ---- o ---- o
            #    /
            #   o Flask-SocketIO
            #    \  middleware
            #     o
            #  Flask-SocketIO WebSocket handler
            #
            self.sockio_mw.wsgi_app = DebuggedApplication(
                self.sockio_mw.wsgi_app, evalex=True)

        if self.server.eio.async_mode == 'threading':
            from werkzeug._internal import _log
            _log(
                'warning', 'WebSocket transport not available. Install '
                'eventlet or gevent and gevent-websocket for '
                'improved performance.')
            app.run(host=host,
                    port=port,
                    threaded=True,
                    use_reloader=use_reloader,
                    **kwargs)
        elif self.server.eio.async_mode == 'eventlet':

            def run_server():
                import eventlet
                import eventlet.wsgi
                eventlet_socket = eventlet.listen((host, port))

                # If provided an SSL argument, use an SSL socket
                ssl_args = [
                    'keyfile', 'certfile', 'server_side', 'cert_reqs',
                    'ssl_version', 'ca_certs', 'do_handshake_on_connect',
                    'suppress_ragged_eofs', 'ciphers'
                ]
                ssl_params = {k: kwargs[k] for k in kwargs if k in ssl_args}
                if len(ssl_params) > 0:
                    for k in ssl_params:
                        kwargs.pop(k)
                    ssl_params['server_side'] = True  # Listening requires true
                    eventlet_socket = eventlet.wrap_ssl(
                        eventlet_socket, **ssl_params)

                eventlet.wsgi.server(eventlet_socket,
                                     app,
                                     log_output=log_output,
                                     **kwargs)

            if use_reloader:
                run_with_reloader(run_server, extra_files=extra_files)
            else:
                run_server()
        elif self.server.eio.async_mode == 'gevent':
            from gevent import pywsgi
            try:
                from geventwebsocket.handler import WebSocketHandler
                websocket = True
            except ImportError:
                websocket = False

            log = 'default'
            if not log_output:
                log = None
            if websocket:
                self.wsgi_server = pywsgi.WSGIServer(
                    (host, port),
                    app,
                    handler_class=WebSocketHandler,
                    log=log,
                    **kwargs)
            else:
                self.wsgi_server = pywsgi.WSGIServer((host, port),
                                                     app,
                                                     log=log)

            if use_reloader:
                # monkey patching is required by the reloader
                from gevent import monkey
                monkey.patch_all()

                def run_server():
                    self.wsgi_server.serve_forever()

                run_with_reloader(run_server, extra_files=extra_files)
            else:
                self.wsgi_server.serve_forever()
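# Hedged usage sketch of the run() method documented above (standard
# Flask-SocketIO wiring; host and port values are illustrative):
from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app, async_mode='gevent')

if __name__ == '__main__':
    socketio.run(app, host='0.0.0.0', port=5000, debug=False)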
Example #45
from Models import User
from flask.ext.cors import CORS
from flask import send_file, send_from_directory
from flask import request, jsonify, abort, Response, g
from flask_socketio import SocketIO, join_room
from Auth import *
from Models import *
from DownloadManager import *
import json, urllib.request, urllib.error, urllib.parse, os, _thread
from multiprocessing import Process
from DownloadDaemon import starter
from EMail import send_mail
import sys

from gevent import monkey
monkey.patch_all(ssl=False)

server = Flask(__name__)
server.config['SECRET_KEY'] = "123456789"
socketio = SocketIO(server, debug=True, logger=True, engineio_logger=True, ping_timeout=600)
cors = CORS(server)
p = None
verbose = False

if len(sys.argv) == 2 and sys.argv[1] == '-v':
    verbose = True

def token_validator(token):
    user = verify_auth_token(token, server.config['SECRET_KEY'])
    if user != None:
        g.user = user
Example #46
from gevent import monkey
monkey.patch_all()  # i.e. dynamically replace the standard library modules at runtime
import gevent
from urllib import request
import time


def f(url):
    print('GET: %s' % url)
    resp = request.urlopen(url)
    data = resp.read()
    print('%d bytes received from %s.' % (len(data), url))


start = time.time()

gevent.joinall([
    gevent.spawn(f, 'https://itk.org/'),
    gevent.spawn(f, 'https://www.github.com/'),
    gevent.spawn(f, 'https://zhihu.com/'),
])
print(time.time() - start)
Example #47
from gevent import monkey
monkey.patch_all(dns=False)
from .orchestra import monkey
monkey.patch_all()

import logging
import os
import sys


__version__ = '0.1.0'


# If we are running inside a virtualenv, ensure we have its 'bin' directory in
# our PATH. This doesn't happen automatically if scripts are called without
# first activating the virtualenv.
exec_dir = os.path.abspath(os.path.dirname(sys.argv[0]))
if os.path.split(exec_dir)[-1] == 'bin' and exec_dir not in os.environ['PATH']:
    os.environ['PATH'] = ':'.join((exec_dir, os.environ['PATH']))

# We don't need to see log entries for each connection opened
logging.getLogger('requests.packages.urllib3.connectionpool').setLevel(
    logging.WARN)
# if requests doesn't bundle it, shut it up anyway
logging.getLogger('urllib3.connectionpool').setLevel(
    logging.WARN)

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s.%(msecs)03d %(levelname)s:%(name)s:%(message)s')
log = logging.getLogger(__name__)