Example 1
def get_driver():
    """
    :return: DNSBase()
    """
    module, obj = CONF.dns.driver.rsplit('.', 1)
    module = eventlet.import_patched(module)
    return getattr(module, obj)()
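
A minimal sketch of the loading pattern this and several later examples share (smtplib.SMTP stands in for the kind of dotted path a setting such as CONF.dns.driver would hold):

import eventlet

# rsplit('.', 1) splits the dotted path into a module path and an attribute
# name; import_patched returns a copy of the module whose blocking calls are
# replaced by eventlet's cooperative (green) equivalents.
module_path, cls_name = 'smtplib.SMTP'.rsplit('.', 1)
green_module = eventlet.import_patched(module_path)
driver_cls = getattr(green_module, cls_name)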
Example 2
    def __call__(self):
        logger.info('Called obligations ROD server')
        res = {}
        xmlrpclib = eventlet.import_patched('xmlrpclib')

        with eventlet.timeout.Timeout(SOCKET_TIMEOUT):
            try:
                server = xmlrpclib.Server(ROD_SERVER)
                result = server.WebRODService.getActivities()
            except Exception as err:
                logger.exception(err)
                result = []

        for obligation in result:
            key = int(obligation.get('PK_RA_ID'))
            title = formatTitle(obligation.get('TITLE'))
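            # Python 2 subtlety: if formatTitle() already returned a unicode
            # object, title.decode('utf-8') first encodes it with the default
            # ASCII codec, which raises UnicodeEncodeError for non-ASCII
            # titles -- hence the except clause below.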
            try:
                title = title.decode('utf-8')
            except UnicodeEncodeError as err:
                logger.warning("Obligation title found as unicode: %s. %s",
                               title, err)
            res[key] = {
                "title": title,
                "source_title": obligation.get("SOURCE_TITLE", title),
                "source_id": obligation.get("PK_SOURCE_ID", key)
            }
        return res
Example 3
    def bulk_import_asynchronously(self, original_parsed_feed, h, feed_urls,
                                   feed):
        # This asynchronous bulk_import is a parallelism monster.

        # We do as much network I/O as we can using eventlet,
        # rather than threads or subprocesses.
        httplib2 = eventlet.import_patched('httplib2')

        httppool = eventlet.pools.Pool(max_size=200)
        httppool.create = lambda: httplib2.Http(DEFAULT_HTTPLIB_CACHE_PATH)

        pool = eventlet.GreenPool(100)

        def get_url(url):
            with httppool.item() as http:
                if self.verbose:
                    print 'getting', url
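                # httplib raises BadStatusLine with an empty first argument
                # when the server closes the connection without sending a
                # status line; that case is treated as a timeout here.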
                try:
                    resp, content = http.request(url, 'GET')
                except httplib.BadStatusLine, e:
                    if not e.args[0]:  # timeout
                        resp, content = e, ''
                    else:
                        raise
                return (resp, content)
Example 4
def import_module(module):
    """
    Import ``module``. Import patched version if eventlet is used.
    """
    if uses_eventlet:
        return eventlet.import_patched(module)
    else:
        return __import__(module)
Example 5
def get_backend(cls, *args, **kwargs):
    """
    :rtype: cls.__name__
    """
    module = CONF.rpc.driver
    LOG.debug('Load client from %s', module)
    module = eventlet.import_patched(module)
    return getattr(module, cls.__name__)(*args, **kwargs)
Example 6
def get_driver(worker_url=None):
    """
    :rtype: dao.control.worker.provisioning.foreman.ForemanDriver
    """
    module, obj = CONF.worker.provision_driver.rsplit('.', 1)
    LOG.info('Load %s from %s', obj, module)
    module = eventlet.import_patched(module)
    return getattr(module, obj)(worker_url)
Example 7
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.add_route("/health", Health())
        self.add_route(_API_PREFIX + "/{scope}/fb/hooks", FacebookHooks())
        self.add_route(_API_PREFIX + "/hubspot/hooks", HubspotHooks())
        self.add_route(_API_PREFIX + "/proxy", CorsProxy())

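        # Rebind the module-level name so code elsewhere in this module that
        # refers to requests gets the cooperative (green) copy.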
        global requests
        requests = eventlet.import_patched("requests")
Example 8
def get_helper(cls, worker=None):
    """
    :rtype: DHCPBase
    """
    if cls.instance:
        return cls.instance
    module, cls_name = CONF.dhcp.driver.rsplit('.', 1)
    module = eventlet.import_patched(module)
    cls_obj = getattr(module, cls_name)
    cls.instance = cls_obj(worker)
    return cls.instance
Example 9
def get_hook(cls, server, db):
    """
    :rtype: HookBase
    """
    hook_path = server_helper.get_hook_path(server)
    hook_cls = cls.name2cls.get(hook_path)
    if hook_cls is None:
        module, cls_name = hook_path.rsplit('.', 1)
        module = eventlet.import_patched(module)
        hook_cls = getattr(module, cls_name)
        cls.name2cls[hook_path] = hook_cls
    return hook_cls(server, db)
Example 10
def get_helper(cls, db):
    """
    :rtype: Base
    """
    path = CONF.worker.switch_lib
    switch_lib_cls = cls.name2cls.get(path)
    if switch_lib_cls is None:
        module, cls_name = path.rsplit('.', 1)
        module = eventlet.import_patched(module)
        switch_lib_cls = getattr(module, cls_name)
        cls.name2cls[path] = switch_lib_cls
    return switch_lib_cls(db)
Example 11
def sentry_logger(conf, name, log_to_console, log_route, fmt, logger,
                  adapted_logger):
    sentry_dsn = conf.get('log_sentry_dsn', None)
    sentry_log_level = getattr(logging,
                               conf.get('log_sentry_level',
                                        'ERROR').upper(),
                               logging.ERROR)
    if sentry_dsn:
        raven_logging = eventlet.import_patched('raven.handlers.logging')
        sentry = raven_logging.SentryHandler(sentry_dsn)
        sentry.setLevel(sentry_log_level)
        logger.addHandler(sentry)
Example 12
def eventlet_main(filename, handler, count=3):
    import eventlet
    feedparser = eventlet.import_patched('feedparser')

    urls = list(load_urls(filename))

    def load_feed(feed_url):
        "This wrapper exists purely to silence feed loading errors"
        try:
            return feedparser.parse(feed_url)
        except Exception, e:
            print >> sys.stderr, "load", type(e)
            return None
Example 13
    def __call__(self):
        """ Return a struct with full info for obligation with id=rodid
        """
        rods = {}
        result = None
        xmlrpclib = eventlet.import_patched('xmlrpclib')

        with eventlet.timeout.Timeout(SOCKET_TIMEOUT):
            try:
                server = xmlrpclib.Server(ROD_SERVER)
                result = server.WebRODService.getROComplete()
            except Exception, err:
                logger.exception(err)
                result = []
Example 14
    def relatedReportingObligations(self):
        """ Return a list of Reporting Obligations related to this theme
        """
        current_theme = getTheme(self.context)
        catalog = getToolByName(self.context, 'portal_catalog')
        rodbaseurl = 'http://rod.eionet.europa.eu/obligations/'
        rods = []
        rodsdone = []
        now = DateTime()
        rodsinfo = {}
        result = None
        xmlrpclib = eventlet.import_patched('xmlrpclib')

        with eventlet.timeout.Timeout(SOCKET_TIMEOUT):
            try:
                server = xmlrpclib.Server(ROD_SERVER)
                result = server.WebRODService.getROComplete()
            except Exception, err:
                logger.exception(err)
                result = []
Example 15
    def handle(self, *args, **options):
        if len(args) != 1:
            raise CommandError('bulk_import takes one argument: '
                               '%i argument(s) given' % len(args))
        try:
            feed = models.Feed.objects.get(pk=args[0])
        except models.Feed.DoesNotExist:
            raise CommandError('Feed with pk %s does not exist' % args[0])

        try:
            self.verbose = (int(options['verbosity']) > 1)
        except (KeyError, ValueError):
            self.verbose = False

        httplib2 = eventlet.import_patched('httplib2')

        # Use httplib2 to GET the feed.
        # This permits us to download it only once, passing a parsed_feed through
        # to the vidscraper functions.
        h = httplib2.Http(DEFAULT_HTTPLIB_CACHE_PATH)
        content = localtv.util.http_get(feed.feed_url, _httplib2=httplib2)

        parsed_feed = feedparser.parse(content)

        # Try to work asynchronously, calling feedparser ourselves. We can do that
        # if the importer supports bulk_import_url_list.
        try:
            feed_urls = bulk_import_url_list(parsed_feed=parsed_feed)
        except ValueError:
            return self.use_old_bulk_import(parsed_feed, feed)
        # Okay, good, we either got the feed_url list, or we passed the work
        # off to the old-style function. Proceed.
        self.forked_tasks = {}
        # close the database connection when we start a new process; otherwise,
        # MySQL falls over because we corrupt the connection
        self.forked_task_worker_pool = multiprocessing.Pool(
            processes=8, initializer=close_connection)
        # start 8 worker processes. That should be fine, right?
        self.bulk_import_asynchronously(parsed_feed, h, feed_urls, feed)
        self.enqueue_forked_tasks_for_thumbnail_fetches(feed)
Example 16
import types

import eventlet
from eventlet import tpool
from oslo_concurrency import lockutils
from oslo_concurrency import processutils
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import reflection

from os_win._i18n import _LE
from os_win import exceptions

LOG = logging.getLogger(__name__)

socket = eventlet.import_patched('socket')
synchronized = lockutils.synchronized_with_prefix('oswin-')

_WBEM_E_NOT_FOUND = 0x80041002


def execute(*cmd, **kwargs):
    """Convenience wrapper around oslo's execute() method."""
    return processutils.execute(*cmd, **kwargs)


def parse_server_string(server_str):
    """Parses the given server_string and returns a tuple of host and port.

    If it's not a combination of host part and port, the port element
    is an empty string. If the input is invalid expression, return a tuple of
Example 17
    make all calls through httplib.
"""

from swift.common import constraints
import logging
import time
import socket

import eventlet
from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, \
    HTTPResponse, HTTPSConnection, _UNKNOWN
from six.moves.urllib.parse import quote, parse_qsl, urlencode
import six

if six.PY2:
    httplib = eventlet.import_patched('httplib')
    from eventlet.green import httplib as green_httplib
else:
    httplib = eventlet.import_patched('http.client')
    from eventlet.green.http import client as green_httplib

# Apparently http.server uses this to decide when/whether to send a 431.
# Give it some slack, so the app is more likely to get the chance to reject
# with a 400 instead.
httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6
green_httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT * 1.6


class BufferedHTTPResponse(HTTPResponse):
    """HTTPResponse class that buffers reading of headers"""
    def __init__(self,
Example 18
#!/usr/bin/env python
import tempfile, sys, os, time
from os import path
from greenlet import GreenletExit
import eventlet
from eventlet.green import socket
from eventlet.green import subprocess
murder_client = eventlet.import_patched('murder_client')
bttrack = eventlet.import_patched('BitTornado.BT1.track')
makemetafile = eventlet.import_patched('BitTornado.BT1.makemetafile')

PORT = 8998
REMOTE_PATH = '/tmp/herd'
DATA_FILE = './data'


def run(local_file, remote_file, hosts):
    start = time.time()
    print "Spawning tracker..."
    tracker = eventlet.spawn(track)
    eventlet.sleep(1)
    local_host = (local_ip(), PORT)
    print "Creating torrent (host %s:%s)..." % local_host
    torrent_file = mktorrent(local_file, '%s:%s' % local_host)
    print "Seeding"
    seeder = eventlet.spawn(seed, torrent_file, local_file)
    print "Transferring"
    if not os.path.isfile('./bittornado.tar.gz'):
        subprocess.call("tar cfz ./bittornado.tar.gz ./BitTornado".split(' '))
    pool = eventlet.GreenPool(100)
    threads = []
Example 19
# -*- coding: utf-8 -*-

import bunch
import simplejson as json
from graph import GraphException
from url_operations import add_path, update_query_params

import eventlet
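# Addressing the package root as 'requests.__init__' (rather than plain
# 'requests') is a workaround: older eventlet releases could fail to patch a
# package referenced only by its bare name.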
requests = eventlet.import_patched('requests.__init__')
requests_adapters = eventlet.import_patched('requests.adapters')

session = requests.Session()
session.headers['Accept-encoding'] = 'gzip'
session.mount('http://', requests_adapters.HTTPAdapter(pool_connections=500, pool_maxsize=500))
session.mount('https://', requests_adapters.HTTPAdapter(pool_connections=500, pool_maxsize=500))

class FQL(object):
    
    """
    A maker of single and multiple FQL queries.
    
    Usage
    =====
    
    Single queries:
    
        >>> q = FQL('access_token')
        >>> result = q("SELECT post_id FROM stream WHERE source_id = ...")
        >>> result
        [Bunch(post_id='XXXYYYZZZ'), ...]
        
Example 20
# limitations under the License.

import atexit
import confluent.exceptions as exc
import confluent.interface.console as conapi
import confluent.messages as msg
import confluent.util as util
import eventlet
import eventlet.event
import eventlet.green.threading as threading
import eventlet.greenpool as greenpool
import eventlet.queue as queue
import eventlet.support.greendns
import pyghmi.constants as pygconstants
import pyghmi.exceptions as pygexc
console = eventlet.import_patched('pyghmi.ipmi.console')
ipmicommand = eventlet.import_patched('pyghmi.ipmi.command')
import socket

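# Point the patched pyghmi session at green equivalents that import_patched
# does not swap in by itself (select, threading, and DNS name resolution).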
console.session.select = eventlet.green.select
console.session.threading = eventlet.green.threading
console.session.socket.getaddrinfo = eventlet.support.greendns.getaddrinfo


def exithandler():
    console.session.iothread.join()

atexit.register(exithandler)

_ipmiworkers = greenpool.GreenPool()
Example 21
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import errno
import eventlet
webclient = eventlet.import_patched('pyghmi.util.webclient')

class NodeHandler(object):
    https_supported = True
    is_enclosure = False
    devname = ''
    maxmacs = 2  # reasonable default, allowing for common scenario of
                 # shared nic in theory, but blocking enclosure managers
                 # and uplink ports

    def __init__(self, info, configmanager):
        self._certfailreason = None
        self._fp = None
        self.info = info
        self.configmanager = configmanager
        targsa = [None]
Example 22
import os
import errno
import socket
import time
import eventlet
import unittest
httplib2 = eventlet.import_patched("httplib2")
from eventlet.timeout import Timeout
from ..loadbalancer import Balancer
from ..actions import Empty, Static, Unknown, NoHosts, Redirect, Proxy, Spin


class MockBalancer(object):
    "Fake Balancer class for testing."

    def __init__(self, fixed_action=None):
        self.fixed_action = fixed_action
        self.static_dir = "/tmp/"

    def resolve_host(self, host):
        return self.fixed_action


class MockSocket(object):
    "Fake Socket class that remembers what was sent. Doesn't implement sendfile."

    def __init__(self):
        self.data = ""

    def send(self, data):
        self.data += data
Example 23
import logging
from functools import wraps

import sqlalchemy as salch
import eventlet
from eventlet import wsgi
from flask import Flask, jsonify, request, abort
from flask_socketio import SocketIO, send as ws_send, emit as ws_emit

from . import dbutil
from . import util
from .consts import DbLastScanCacheType, DbScanType
from .dbutil import DbKeychestAgent, DbWatchTarget, DbLastScanCache, DbWatchService, DbBaseDomain, DbHelper
from .trace_logger import Tracelogger

eventlet.monkey_patch(socket=True)
redis = eventlet.import_patched('redis')
flask_sse = eventlet.import_patched('flask_sse')

__author__ = 'dusanklinec'
LOCAL_REDIS = 'redis://localhost:6379'
logger = logging.getLogger(__name__)


class ServerSentEventsBlueprint(flask_sse.ServerSentEventsBlueprint):
    """
    HTTP/2 SSE
    """
    @property
    def redis(self):
        return redis.StrictRedis.from_url(LOCAL_REDIS)
Example 24
#License#
#bitHopper by Colin Rice is licensed under a Creative Commons 
# Attribution-NonCommercial-ShareAlike 3.0 Unported License.
#Based on a work at github.com.

import json
import base64
import traceback

import eventlet
httplib2 = eventlet.import_patched('httplib20_7_1')
from eventlet import pools
from eventlet.green import socket

# Global timeout for sockets in case something leaks
socket.setdefaulttimeout(900)

import webob

class Work():
    def __init__(self, bitHopper):
        self.bitHopper = bitHopper
        self.i = 0
        self.connect_pool = {}
        #pools.Pool(min_size = 2, max_size = 10, create = lambda: httplib2.Http(disable_ssl_certificate_validation=True))

    def get_http(self, address, timeout=15):
        if address not in self.connect_pool:
            self.connect_pool[address] =  pools.Pool(min_size = 0, create = lambda: httplib2.Http(disable_ssl_certificate_validation=True, timeout=timeout))
        return self.connect_pool[address].item()
Example 25
"""A simple web server that accepts POSTS containing a list of feed urls,
and returns the titles of those feeds.
"""
import eventlet
feedparser = eventlet.import_patched('feedparser')

# the pool provides a safety limit on our concurrency
pool = eventlet.GreenPool()


def fetch_title(url):
    d = feedparser.parse(url)
    return d.feed.get('title', '')


def app(environ, start_response):
    if environ['REQUEST_METHOD'] != 'POST':
        start_response('403 Forbidden', [])
        return []

    # the pile collects the result of a concurrent operation -- in this case,
    # the collection of feed titles
    pile = eventlet.GreenPile(pool)
    for line in environ['wsgi.input'].readlines():
        url = line.strip()
        if url:
            pile.spawn(fetch_title, url)
    # since the pile is an iterator over the results,
    # you can use it in all sorts of great Pythonic ways
    titles = '\n'.join(pile)
    start_response('200 OK', [('Content-type', 'text/plain')])
Example 26
#!/usr/bin/env python

import eventlet
from eventlet.green import urllib, urllib2
from stalker.stalker_utils import get_basic_auth

smtplib = eventlet.import_patched('smtplib')

try:
    import simplejson as json
except ImportError:
    import json


class PagerDuty(object):

    """Pagerduty Notifications"""

    def __init__(self, conf, logger, redis_client):
        self.conf = conf
        self.logger = logger
        self.rc = redis_client
        standard_service_key = conf.get('pagerduty_service_key')
        if not standard_service_key:
            raise Exception('No pagerduty standard service key in conf')
        crit_service_key = conf.get('pagerduty_critical_service_key')
        if not crit_service_key:
            crit_service_key = standard_service_key
        self.service_keys = {1: standard_service_key, 2: crit_service_key}
        self.url = conf.get('pagerduty_url', 'https://events.pagerduty.com/generic/2010-04-15/create_event.json')
        self.prefix = conf.get('pagerduty_incident_key_prefix', "")
Example 27
def _patch_package_module(module_name, additional_modules=None):
    """Patch a module residing in a package and install it in sys.modules.

    Upon exiting the context, the original contents of sys.modules will be
    restored.

    Packages frustrate eventlet's default patching strategy in many ways, some
    of which render it effectively unusable on package modules:
    * Module collisions: When patching the root of a package, all of its
    descendents must be expunged from sys.modules, otherwise relative imports
    raise an ImportError because the existing entries in sys.modules do not
    match the patched package.
    * Orphan modules: The patcher caches patched modules, but not their
    ancestors. If a new patch requires an already-patched module to be
    installed in sys.modules, the patched ancestors must also be installed,
    otherwise the sys.modules entry would be rootless.
    * Child hoisting: When the patcher unloads a child module and imports a
    patched version of it, the import system assigns the patched version
    as a member of its containing package, even if the __init__ module does
    not import the child. The patcher fails to undo package member assignments
    which occur as side-effects of importing patched modules, causing patched
    modules to leak into unpatched modules. In the worst case, exception
    classes from patched modules may be used instead of the unpatched versions,
    such that except statements fail to trap exceptions thrown by the unpatched
    modules.

    To address the deficits in eventlet's patching strategy, unload not only
    the module to be patched, but all currently loaded modules descending from
    the module's root package, then import a patched version of the module,
    thereby ensuring the module has a valid lineage separate from the original
    package.

    Note that creating an internally consistent, patched copy of a package, in
    which every module has been patched and contains only references to other
    patched modules, would be an onerous undertaking. Ensuring consistent
    references to a patched module throughout a package requires registering
    the patched module in sys.modules and loading every one of the package's
    other modules in dependency order. As packages can contain dependency
    cycles between the __init__ module and other modules, in which __init__
    imports other modules at the end of its body to expose certain symbols at
    the package level, package-level symbols would also need to be inspected
    and patched.
    """
    if additional_modules is None:
        additional_modules = []
    ancestors = list(utils.iter_ancestor_module_names(module_name))
    root_module = ancestors[0]
    # Initialize the saver prior to examining sys.modules in order to acquire
    # the import lock, preventing other threads from mutating sys.modules
    # and other shared state during the patching process.
    saver = eventlet.patcher.SysModulesSaver()
    try:
        # The implementation does not support nesting patches of modules in the
        # same root package. Properly supporting such nested patches would
        # require not resetting the entire package state in sys.modules, so
        # that modules containing classes such as exceptions can be shared
        # within the package.
        if root_module in _currently_patched_packages:
            raise RuntimeError(
                'Cannot patch module %s in root package %s'
                ' while module %s is already patched' % (
                    module_name, root_module,
                    _currently_patched_packages[root_module]))
        _currently_patched_packages[root_module] = module_name
        try:
            original_modules = set(sys.modules)
            original_modules.add(module_name)
            saver.save(*original_modules)
            # Determine which modules (apart from the target module) the
            # patching process will add to sys.modules, and freeze their state.
            # Avoid freezing all of sys.modules, as eventlet caches patched
            # modules in sys.modules.
            importlib.import_module(module_name)
            new_modules = set(sys.modules) - original_modules
            utils.delete_sys_modules(new_modules)
            saver.save(*new_modules)

            utils.delete_sys_modules(
                list(utils.iter_descendent_module_names(root_module)))
            # Patch the target module and all of its ancestors, rather than
            # just the target module, because import_patched caches patched
            # modules, so subsequent calls to this function must restore the
            # cached module's ancestry tree to avoid creating a rootless
            # module.
            for name in ancestors:
                sys.modules[name] = eventlet.import_patched(name)
            # Due to patched module caching, patches only import a module's
            # dependencies the first time. Make repeat calls as consistent
            # as possible by deregistering all unpatched modules in the root
            # package from sys.modules.
            utils.delete_sys_modules(
                set(utils.iter_descendent_module_names(root_module)) -
                set(ancestors))
            yield sys.modules[module_name]
        finally:
            del _currently_patched_packages[root_module]
    finally:
        saver.restore()
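
A minimal usage sketch of the helper above (assumptions: the yield/finally structure implies the function is wrapped with @contextlib.contextmanager, and utils and _currently_patched_packages are module-level helpers defined elsewhere in the same file; the package name and call below are hypothetical):

# Import a green copy of 'mypkg.client' without leaking patched modules into
# unrelated sys.modules entries; the saved module state is restored on exit.
with _patch_package_module('mypkg.client') as patched_client:
    patched_client.fetch_all()  # hypothetical network call on the green copy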
Example 28
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.


import eventlet
from dao.common import config

clientv20 = eventlet.import_patched('neutronclient.v2_0.client')


CONF = config.get_config()


def get_client():
    neutron = clientv20.Client(
        auth_url=CONF.openstack.auth_url,
        region_name=CONF.openstack.region,
        username=CONF.openstack.username,
        password=CONF.openstack.password,
        tenant_name=CONF.openstack.project,
        insecure=CONF.openstack.insecure)
    return neutron
Example 29
def app():
    wireUpBlpapiImplementation(eventlet.import_patched("blpapi_simulator"))
    my_app.register_blueprint(dev.blueprint, url_prefix='/dev')
    app = my_app.test_client()
    app.testing = True
    return app
Example 30
import re
import eventlet
import logging
import datetime
from eventlet import pools
from eventlet.green import urllib
from django.conf import settings
import os
from importer.api.eventful import conf

httplib2 = eventlet.import_patched('httplib2')
from hashlib import md5
from django.utils import simplejson

IMG_SIZE_RE = re.compile('(?<=/images/)\w+(?=/)', re.I)
IMG_EXT_RE = re.compile(
    '/(?P<name>\d+-\d+).(?P<ext>jpg|jpeg|tif|tiff|png|gif)$', re.I)


class APIError(Exception):
    pass


class API(object):
    CALL_COUNT = 0
    logger = logging.getLogger('importer.eventful')

    def __init__(self,
                 app_key=conf.API_KEY,
                 server='api.eventful.com',
                 make_dumps=False):
Example 31
# -*- encoding: utf-8 -*-

import random
import types
from cStringIO import StringIO

from M2Crypto import RSA, SSL, X509

import eventlet
from eventlet.green import os, httplib, socket
from eventlet.green.subprocess import Popen, PIPE
tempfile = eventlet.import_patched("tempfile")

from pilot.lib import tools, urlparse
import pilot.spooler

from pilot_cli import proxylib

def quote(s):
    """return a GRAM5 string, quoted and escaped if needed"""
    forbidden_chars = "+&|()=<>!\"'^#$"
    should_quote = False
    for c in forbidden_chars:
        if c in s:
            should_quote = True
            break

    if should_quote:
        return '"' + s.replace('"', '""') + '"'
    else:
        return s
Example 32
def eventlet_import_monkey_patched(module):
    """Returns module monkey patched by eventlet.

    It's needed for some tests, for example, context test.
    """
    return eventlet.import_patched(module, **EVENTLET_MONKEY_PATCH_MODULES)
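Note: import_patched's keyword arguments map module names to the replacement modules to inject, so EVENTLET_MONKEY_PATCH_MODULES is presumably such a mapping defined alongside this helper.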
Example 33
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import eventlet, sys
from eventlet.corolocal import local
from eventlet.green import time
from contextlib import contextmanager
from .sentinel import Sentinel

traceback = eventlet.import_patched('traceback')
output = eventlet.import_patched('forge.output')
"""A sentinel value used to indicate that the task is not yet complete."""
PENDING = Sentinel("PENDING")
"""A sentinel value used to indicate that the task terminated with an error of some kind."""
ERROR = Sentinel("ERROR")


class ChildError(Exception):
    """
    Used to indicate that a background task has had an error. The
    details are reported at the source of the error, so this error
    message is intentionally sparse.
    """
    def __init__(self, parent, *children):
        self.parent = parent
Example 34
#!/usr/bin/env python
# -*- encoding: utf-8 -*-

'''
Insert a lot of dummy traffic data into a memcache cluster.

Created on Mar 29, 2013

@author: Kotaimen
'''

import eventlet
eventlet.monkey_patch(socket=True, select=True)
memcache = eventlet.import_patched('memcache')

import multiprocessing
import threading

import json
import math
import random
import time
import itertools
import datetime
import re

# Random words for payload, note: this comes from "import this"
WORDS = list(set(w.lower() for w in re.split(r'\W+', '''
The Zen of Python, by Tim Peters

Beautiful is better than ugly.
Example 35
    make all calls through httplib.
"""

from swift import gettext_ as _
from swift.common import constraints
import logging
import time
import socket

import eventlet
from eventlet.green.httplib import CONTINUE, HTTPConnection, HTTPMessage, \
    HTTPResponse, HTTPSConnection, _UNKNOWN
from six.moves.urllib.parse import quote
import six

httplib = eventlet.import_patched('httplib')
httplib._MAXHEADERS = constraints.MAX_HEADER_COUNT


class BufferedHTTPResponse(HTTPResponse):
    """HTTPResponse class that buffers reading of headers"""
    def __init__(self,
                 sock,
                 debuglevel=0,
                 strict=0,
                 method=None):  # pragma: no cover
        self.sock = sock
        # sock is an eventlet.greenio.GreenSocket
        # sock.fd is a socket._socketobject
        # sock.fd._sock is a socket._socket object, which is what we want.
        self._real_socket = sock.fd._sock
Example 36
def get_driver():
    module, obj = CONF.worker.orchestration_driver.rsplit('.', 1)
    LOG.info('Load %s from %s', obj, module)
    module = eventlet.import_patched(module)
    return getattr(module, obj)()
Example 37
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import base64

import eventlet
from oslo_serialization import jsonutils

from neutron.plugins.vmware.vshield.common import exceptions

httplib2 = eventlet.import_patched('httplib2')


def xmldumps(obj):
    config = ""
    if isinstance(obj, dict):
        for key, value in obj.iteritems():
            cfg = "<%s>%s</%s>" % (key, xmldumps(value), key)
            config += cfg
    elif isinstance(obj, list):
        for value in obj:
            config += xmldumps(value)
    else:
        config = obj

    return config
Example 38
from __future__ import unicode_literals

import coloredlogs
import logging
import argparse
import random
import string
from flask import Flask

import eventlet
from eventlet import wsgi

# Uncomment for eventlet
# eventlet.monkey_patch()
# flask_sse = eventlet.import_patched('flask_sse')
redis = eventlet.import_patched('redis')

# Uncomment for gunicorn
# import flask_sse

from collections import OrderedDict
from flask import Blueprint, request, current_app, json, stream_with_context
import six

logger = logging.getLogger(__name__)
coloredlogs.install(level=logging.DEBUG)

# import redis
# import redis.connection
pool = redis.BlockingConnectionPool(
    connection_class=redis.connection.Connection,
Example 39
'''
Unlike the demo on the web page, this actually works.
The parallel is indeed parallel and indeed goes faster.
'''

from demo_deco import timeit
from urls import urls

import eventlet
requests = eventlet.import_patched('requests')


def fetch(url):
    return requests.get(url)

@timeit
def parallel(urls):
    pool = eventlet.GreenPool()
    for body in pool.imap(fetch, ['http://'+u for u in urls]):
        print body.url


@timeit
def sequential(urls):
    for url in urls:
        body = fetch('http://'+url)
        print body.url

Example 40
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.
#
"""Fujitsu C-Fabric Driver implements CLI over telnet for
Neutron network life-cycle management.
"""

import re
import select
import time

import eventlet
telnetlib = eventlet.import_patched('telnetlib')

from oslo_config import cfg
try:
    from oslo_log import log as logging
except ImportError:
    from neutron.openstack.common import log as logging
try:
    from oslo_utils import excutils
except ImportError:
    from neutron.openstack.common import excutils

from networking_fujitsu.ml2.drivers.fujitsu.common import utils as fj_util
from neutron.common import utils
from neutron.plugins.ml2.common import exceptions as ml2_exc
import oslo_i18n
Example 41
import eventlet
pigpio = eventlet.import_patched('pigpio')
import robovinci.pins as pins

LEFT = 0
RIGHT = 1

_PINS = {
    LEFT: [
        pins.MOTOR_LEFT_FORWARD,
        pins.MOTOR_LEFT_REVERSE,
        pins.MOTOR_LEFT_PWM,
        ],
    RIGHT: [
        pins.MOTOR_RIGHT_FORWARD,
        pins.MOTOR_RIGHT_REVERSE,
        pins.MOTOR_RIGHT_PWM,
        ],
    }

class _Updater(object):
    def __init__(self, side, ttime=2):
        forward, reverse, pwm = _PINS[side]
        self._pin_forward = forward
        self._pin_reverse = reverse
        self._pin_pwm = pwm
        self._current = 0
        self._target = 0
        self._delay = ttime / 200.0
        self._step = 200.0 / ttime
        self._thread = eventlet.spawn_n(self._update)
Example 42
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import eventlet, functools, sys, os
from contextlib import contextmanager
from eventlet.corolocal import local
from eventlet.green import time
from .sentinel import Sentinel

logging = eventlet.import_patched('logging')
traceback = eventlet.import_patched('traceback')
datetime = eventlet.import_patched('datetime')
output = eventlet.import_patched('forge.output')
emod = eventlet.import_patched('forge.executor')
executor = emod.executor
Result = emod.Result


# XXX: need better default for logfile
def setup(logfile=None):
    """
    Setup the task system. This will perform eventlet monkey patching as well as set up logging.
    """

    if not logfile:
Example 43
import os
import random
from datetime import datetime
from urlparse import urlparse

import eventlet

requests = eventlet.import_patched('requests.__init__')
time = eventlet.import_patched('time')
import redis

from bs4 import BeautifulSoup
from requests.exceptions import RequestException

import settings

num_requests = 0

redis = redis.StrictRedis(host=settings.redis_host,
                          port=settings.redis_port,
                          db=settings.redis_db)


def make_request(url, return_soup=True):
    # global request building and response handling

    url = format_url(url)

    if "picassoRedirect" in url:
        return None  # skip the redirect URLs
Example 44
def wireUpDevelopmentDependencies():
    global blpapi
    blpapi = eventlet.import_patched("blpapi_simulator")
    app.register_blueprint(dev.blueprint, url_prefix='/dev')
Example 45
import eventlet
import dns
import dns.rdataclass
import dns.rdatatype
import dns.exception
import dns.flags
import dns.rcode
import dns.message
import dns.opcode
from oslo_config import cfg
from oslo_log import log as logging

from designate.mdns import base
from designate.metrics import metrics

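# dns.query performs blocking socket I/O, so a green (eventlet-patched) copy
# is imported here in place of the plain module.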
dns_query = eventlet.import_patched('dns.query')

LOG = logging.getLogger(__name__)
CONF = cfg.CONF


class NotifyEndpoint(base.BaseEndpoint):
    RPC_API_VERSION = '2.0'
    RPC_API_NAMESPACE = 'notify'

    def notify_zone_changed(self, context, zone, host, port, timeout,
                            retry_interval, max_retries, delay):
        """
        :param context: The user context.
        :param zone: The designate zone object.  This contains the zone
            name.
Example 46
#!/usr/bin/env python3

import re
import uuid
import pathlib
import eventlet
import subprocess

from urllib.parse import urlparse
from eventlet.green.subprocess import Popen
from flask import Flask, send_from_directory, jsonify, request
from flask_socketio import SocketIO, emit
from flask_cors import CORS

requests = eventlet.import_patched('requests')

app = Flask(__name__)
app.config.from_json('config.json')
socketio = SocketIO(app,
                    cors_allowed_origins='*',
                    logger=True,
                    engineio_logger=True,
                    manage_session=True)
cors = CORS(app, resources={r'/retrieve/*': {'origins': '*'}})


def is_valid_url(url):
    pattern = re.compile(
        r'^https?://'
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'
        r'localhost|\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'
Example 47
#      contributors may be used to endorse or promote products derived
#      from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL DONGSHENG CAI BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import eventlet
requests = eventlet.import_patched('requests.__init__')
import time
import xml.etree.ElementTree as ET
from cStringIO import StringIO
try:
    register_namespace = ET.register_namespace
except AttributeError:
    def register_namespace(prefix, uri):
        ET._namespace_map[uri] = prefix


class WNSException(Exception): pass


class WNSInvalidPushTypeException(WNSException):
Example 48
import dns.flags
import dns.rcode
import dns.message
import dns.opcode
from oslo_config import cfg
from oslo_log import log as logging

from designate.i18n import _LI
from designate.i18n import _LW
from designate.backend import base
from designate import exceptions
from designate.mdns import rpcapi as mdns_api
from designate import objects


dns_query = eventlet.import_patched('dns.query')

LOG = logging.getLogger(__name__)
CONF = cfg.CONF


class AgentPoolBackend(base.Backend):
    __plugin_name__ = 'agent'

    __backend_status__ = 'untested'

    def __init__(self, target):
        super(AgentPoolBackend, self).__init__(target)
        self.host = self.options.get('host')
        self.port = int(self.options.get('port'))
        self.timeout = CONF['service:pool_manager'].poll_timeout
Example 49
import simplejson as json
import urllib
#import oauth2 as oauth
import eventlet
oauth = eventlet.import_patched('oauth2')
import time

class Twitter_v_1_1:
    def __init__(self, consumer_key, consumer_secret, user_key, user_secret):
        consumer = oauth.Consumer(key=consumer_key, secret=consumer_secret)
        token = oauth.Token(key=user_key, secret=user_secret)
        self.client = oauth.Client(consumer, token)
    
    def request(self, url, method="GET", parameters=[], headers=None, body=''):
        if isinstance(parameters, dict):
            parameters = parameters.items()
        method = method.upper()
        if method == "POST"  and not body and parameters:
            body = urllib.urlencode(parameters)
        elif method == "GET" and parameters:
            url = url + '?' + urllib.urlencode(parameters)
        
        resp = None
        content = None
        clock = 0
        while content is None or (clock < 60 and resp.status >= 500):
            resp, content = self.client.request(url, method=method, body=body, headers=headers)
            if resp.status == 500:
                clock += 10
                time.sleep(10)
            elif resp.status == 502 or resp.status == 503:
Example 50
import eventlet

eventlet.monkey_patch(thread=False)
eventlet.import_patched('mongoengine')

import sys, traceback

import tensorflow as tf
from server3.service.model_service import run_model
from server3.business import job_business
from server3.business import staging_data_set_business
from server3.business import project_business
from server3.business import ownership_business
from server3.service.logger_service import emit_error
from server3.service.logger_service import emit_success
from server3.service.logger_service import save_job_status

FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string("job_id", "59ae047e0c11f35fafebc422",
                           "job object id")


def main(unused_argv):
    job_id = FLAGS.job_id
    if job_id == "59ae047e0c11f35fafebc422":
        raise ValueError('no job_id flag')
    job = job_business.get_by_job_id(job_id)
    # project id
    project_id = job.project.id
    project = project_business.get_by_id(project_id)
    ow = ownership_business.get_ownership_by_owned_item(project, 'project')