class Sandbox(object):
    """Registry mapping event names to zero-argument handler factories."""

    def __init__(self):
        # event name -> factory producing a fresh handler per invocation
        self._events = dict()
        self._logger = Logger()

    def invoke(self, event_name, request, stream):
        """ Connect worker and decorator """
        factory = self._events.get(event_name, None)
        if factory is None:
            self._logger.warn("There is no handler for event %s" % event_name)
            return
        handler = factory()
        handler.invoke(request, stream)

    def on(self, event_name, event_handler):
        """Register a handler factory, wrapping it in default() if needed.

        The callable is probed once: our decorator wrappers construct
        cleanly and mark the result with "_wrapped"; anything else is
        wrapped in the default() decorator.
        """
        try:
            probe = event_handler()
        except Exception:
            # A plain callable may refuse zero-argument construction.
            probe = default(event_handler)()
            if hasattr(probe, "_wrapped"):
                event_handler = default(event_handler)
        else:
            if not hasattr(probe, "_wrapped"):
                event_handler = default(event_handler)
        self._events[event_name] = event_handler
Example #2
0
 def __init__(self, storage, key, namespace):
     """Set up per-user storage namespaces and log the initialization.

     storage   -- raw storage service, wrapped into SecureStorage
     key       -- key material, stored as-is
     namespace -- suffix appended to the module-level namespace_prefix
                  (prefix defined elsewhere in the module -- not visible
                  here; TODO confirm)
     """
     self.storage = SecureStorage(storage)
     self.key = key
     self.logger = Logger()
     self.namespace = namespace_prefix + namespace
     self.dbnamespace = namespace_prefix + "apps"
     self.lognamespace = namespace_prefix + "logs"
     self.logger.info("UserDB has been initialized. Use namespace %s" %
                      self.namespace)
Example #3
0
    def __init__(self,
                 app_name,
                 addresses=None,
                 attempts=3,
                 delay=0.1,
                 max_delay=60.0,
                 delay_exp=2.0,
                 connect_timeout=None,
                 timeout=None,
                 logger=None):
        """Set up a reconnectable client for a cocaine application.

        app_name  -- application to connect to
        addresses -- comma-separated "host[:port]" list; falls back to
                     ReconnectableService.DEFAULT_ADDRESS
        attempts  -- number of (re)connection attempts
        delay / max_delay / delay_exp -- exponential backoff parameters
        connect_timeout / timeout     -- socket timeouts (None = none)
        logger    -- logger to use; a fresh Logger() when omitted
        """
        self.delay = delay
        self.max_delay = max_delay
        self.delay_exp = delay_exp
        self.connect_timeout = connect_timeout
        self.timeout = timeout
        self.attempts = attempts
        self.logger = logger or Logger()
        self._reset()

        addresses = addresses or ReconnectableService.DEFAULT_ADDRESS
        pairs = []
        for address in addresses.split(','):
            address_parts = address.split(':')
            host = address_parts[0]
            # Explicit conditional instead of the old "and/or" trick, which
            # silently substituted DEFAULT_PORT for an explicit (falsy) port 0.
            port = (int(address_parts[1]) if len(address_parts) > 1
                    else ReconnectableService.DEFAULT_PORT)
            pairs.append((host, port))
        # Endless round-robin over the configured endpoints.
        self.addresses = itertools.cycle(pairs)

        self.app_name = app_name
        self.upstream = None
Example #4
0
    def __init__(self, init_args=sys.argv):
        """Wire the worker to cocaine-runtime and send the handshake.

        NOTE(review): init_args=sys.argv is a mutable default argument,
        evaluated once at import time.
        """
        self._logger = Logger()
        self._init_endpoint(init_args)

        # session id -> request object holding pending chunks
        self.sessions = dict()
        self.sandbox = Sandbox()

        self.service = ev.Service()

        # Disown fires if the runtime stops answering; heartbeat keeps-alive.
        self.disown_timer = ev.Timer(self.on_disown, 2, self.service)
        self.heartbeat_timer = ev.Timer(self.on_heartbeat, 5, self.service)
        self.disown_timer.start()
        self.heartbeat_timer.start()

        self.pipe = Pipe(self.endpoint)
        self.service.bind_on_fd(self.pipe.fileno())

        self.decoder = Decoder()
        self.decoder.bind(self.on_message)

        # Inbound bytes -> decoder -> on_message; outbound via w_stream.
        self.w_stream = WritableStream(self.service, self.pipe)
        self.r_stream = ReadableStream(self.service, self.pipe)
        self.r_stream.bind(self.decoder.decode)


        self.service.register_read_event(self.r_stream._on_event, self.pipe.fileno())
        self._logger.debug("Worker with %s send handshake" % self.id)
        self._send_handshake()
Example #5
0
    def __init__(self, init_args=sys.argv, disown_timeout=2, heartbeat_timeout=20):
        """Wire the worker to cocaine-runtime and send handshake + heartbeat.

        disown_timeout    -- seconds without a runtime heartbeat reply
                             before the worker considers itself disowned
        heartbeat_timeout -- interval between outgoing heartbeats
        NOTE(review): init_args=sys.argv is a mutable default argument,
        evaluated once at import time.
        """
        self._logger = Logger()
        self._init_endpoint(init_args)

        # session id -> request object holding pending chunks
        self.sessions = dict()
        self.sandbox = Sandbox()

        self.loop = ev.Loop()

        self.disown_timer = ev.Timer(self.on_disown, disown_timeout, self.loop)
        self.heartbeat_timer = ev.Timer(self.on_heartbeat, heartbeat_timeout, self.loop)
        self.disown_timer.start()
        self.heartbeat_timer.start()

        self.pipe = Pipe(self.endpoint)
        self.loop.bind_on_fd(self.pipe.fileno())

        self.decoder = Decoder()
        self.decoder.bind(self.on_message)

        # Inbound bytes -> decoder -> on_message; outbound via w_stream.
        self.w_stream = WritableStream(self.loop, self.pipe)
        self.r_stream = ReadableStream(self.loop, self.pipe)
        self.r_stream.bind(self.decoder.decode)

        self.loop.register_read_event(self.r_stream._on_event, self.pipe.fileno())
        self._logger.debug("Worker with %s send handshake" % self.id)
        # Send both messages - to run timers properly. This messages will be sent
        # only after all initialization, so they have same purpose.
        self._send_handshake()
        self._send_heartbeat()
 def __init__(self):
     """Create an empty request stream: nothing bound, no error, open."""
     self._logger = Logger()
     self.cache = list()  # chunks received before any callback was bound
     self._clbk = None   # Callback - on chunk
     self._errbk = None  # Errorback - translate error to handler
     self._errmsg = None  # Store message
     self._state = 1      # Status of stream (close/open); 1 = open
Example #7
0
 def __init__(self, storage, key, namespace):
     """Set up per-user storage namespaces and log the initialization.

     storage   -- raw storage service, wrapped into SecureStorage
     key       -- key material, stored as-is
     namespace -- suffix appended to the module-level namespace_prefix
                  (prefix defined elsewhere in the module -- not visible
                  here; TODO confirm)
     """
     self.storage = SecureStorage(storage)
     self.key = key
     self.logger = Logger()
     self.namespace = namespace_prefix + namespace
     self.dbnamespace = namespace_prefix + "apps"
     self.lognamespace = namespace_prefix + "logs"
     self.logger.info("UserDB has been initialized. Use namespace %s"
                      % self.namespace)
Example #8
0
 def __init__(self, **config):
     """Connect to the local MySQL server and create/select the work db.

     Recognized config keys: MysqlSocket (unix socket path),
     local_db_name (default 'COMBAINE'), user, password.
     Raises Exception when the connection or database setup fails.
     """
     self.logger = Logger()
     self.place = None
     self.tablename = ''
     try:
         unix_socket = config.get('MysqlSocket',
                                  "/var/run/mysqld/mysqld.sock")
         self.dbname = config.get('local_db_name', 'COMBAINE')
         self.user = config.get('user', 'root')
         self.password = config.get('password', "")
         self.db = MySQLdb.connect(unix_socket=unix_socket,
                                   user=self.user,
                                   passwd=self.password)
         self.cursor = self.db.cursor()
         self.cursor.execute('CREATE DATABASE IF NOT EXISTS %s' %
                             self.dbname)
         self.db.commit()
         self.db.select_db(self.dbname)
     except Exception as err:
         self.logger.error('Error in init MySQLdb %s' % err)
         # Carry the original error text instead of raising the bare,
         # message-less Exception class (which discarded `err`).
         raise Exception('Error in init MySQLdb %s' % err)
Example #9
0
 def __init__(self, **config):
     """Connect to local MySQL over a unix socket; create/select the db.

     Recognized config keys: MysqlSocket, local_db_name.
     Raises Exception on any connection/setup failure.
     """
     self.logger = Logger()
     self.place = None
     self.tablename = ''
     try:
         # port = config.get('local_db_port', 3306)
         unix_socket = config.get('MysqlSocket',
                                  "/var/run/mysqld/mysqld.sock")
         self.dbname = config.get('local_db_name', 'COMBAINE')
         self.db = MySQLdb.connect(unix_socket=unix_socket, user='******', )
         self.cursor = self.db.cursor()
         self.cursor.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbname)
         self.db.commit()
         self.db.select_db(self.dbname)
     except Exception as err:
         self.logger.error('Error in init MySQLdb %s' % err)
         # NOTE(review): raises the bare Exception class; `err` is lost.
         raise Exception
 def __init__(self):
     """Create an empty event registry and a logger."""
     self._events = dict()
     self._logger = Logger()
Example #11
0
class MySqlDG(object):
    """MySQL-backed data grid: bulk-loads rows into MEMORY tables under
    /dev/shm and runs ad-hoc queries against them."""

    def __init__(self, **config):
        """Connect over a unix socket and create/select the working db.

        Config keys: MysqlSocket, local_db_name, user, password.
        Raises Exception when connection or setup fails.
        """
        self.logger = Logger()
        self.place = None
        self.tablename = ''
        try:
            unix_socket = config.get('MysqlSocket',
                                     "/var/run/mysqld/mysqld.sock")
            self.dbname = config.get('local_db_name', 'COMBAINE')
            self.user = config.get('user', 'root')
            self.password = config.get('password', "")
            self.db = MySQLdb.connect(unix_socket=unix_socket,
                                      user=self.user,
                                      passwd=self.password)
            self.cursor = self.db.cursor()
            self.cursor.execute('CREATE DATABASE IF NOT EXISTS %s' %
                                self.dbname)
            self.db.commit()
            self.db.select_db(self.dbname)
        except Exception as err:
            self.logger.error('Error in init MySQLdb %s' % err)
            # NOTE(review): raises the bare Exception class; the original
            # error text in `err` is lost to the caller.
            raise Exception

    def putData(self, data, tablename):
        """Write *data* rows to a temp file and LOAD DATA them into a
        fresh MEMORY table *tablename*. Returns True/False.
        """
        try:
            # Sanitize the table name into a valid SQL identifier.
            tablename = tablename.replace('.',
                                          '_').replace('-',
                                                       '_').replace('+', '_')
            line = None
            fname = '/dev/shm/%s-%i' % ('COMBAINE', random.randint(0, 65535))
            with open(fname, 'w') as table_file:
                # Each row is iterable of (name, value) pairs; only the
                # values are written, 'GOPA'-separated.
                for line in data:
                    table_file.write('GOPA'.join([str(x)
                                                  for _, x in line]) + '\n')
                table_file.close()

                # 'line' still holds the last row; falsy means no input.
                if not line:
                    self.logger.info("Data for mysql is missed")
                    os.remove(table_file.name)
                    return False

                self.logger.debug(
                    'Data has been written to a temporary file %s, size: %d bytes'
                    % (table_file.name, os.lstat(table_file.name).st_size))

            if not self._preparePlace(line):
                self.logger.error(
                    'Unsupported field types. Look at preparePlace()')
                return False

            # Identifiers cannot be bound as SQL parameters; these queries
            # are built by string interpolation on the sanitized name.
            self.cursor.execute('DROP TABLE IF EXISTS %s' % tablename)
            query = "CREATE TABLE IF NOT EXISTS `%(tablename)s` %(struct)s ENGINE = MEMORY DATA DIRECTORY='/dev/shm/'" % {
                'tablename': tablename,
                'struct': self.place
            }
            self.cursor.execute(query)
            self.db.commit()

            query = "LOAD DATA INFILE '%(filename)s' INTO TABLE `%(tablename)s` FIELDS TERMINATED BY 'GOPA'" % {
                'filename': table_file.name,
                'tablename': tablename
            }
            self.cursor.execute(query)
            self.db.commit()
            if os.path.isfile(table_file.name):
                os.remove(table_file.name)
        except Exception as err:
            self.logger.error('Error in putData %s' % err)
            # NOTE(review): table_file is unbound here if the failure
            # happened before open() succeeded -- this would raise NameError.
            if os.path.isfile(table_file.name):
                os.remove(table_file.name)
            return False
        else:
            self.tablename = tablename
            return True

    def _preparePlace(self, example):
        """Build self.place, the CREATE TABLE column spec, from one example
        row of (field_name, value) pairs; False if a type is unsupported.
        """
        ftypes = {
            types.IntType: "BIGINT",
            types.UnicodeType: "VARCHAR(200)",
            types.StringType: "VARCHAR(200)",
            types.FloatType: "DOUBLE"
        }
        try:
            self.place = '( %s )' % ','.join([
                " `%s` %s" % (field_name, ftypes[type(field_type)])
                for field_name, field_type in example
            ])
        except Exception as err:
            self.logger.error('Error in preparePlace() %s' % err)
            self.place = None
            return False
        else:
            return True

    def perfomCustomQuery(self, query_string):
        """Execute *query_string* and return all fetched rows."""
        self.logger.debug("Execute query: %s" % query_string)
        self.cursor.execute(query_string)
        _ret = self.cursor.fetchall()
        self.db.commit()
        return _ret
Example #12
0
class UserDB(object):
    """User accounts kept in cocaine secure storage.

    Three namespaces are used: users (self.namespace), per-user app
    lists (dbnamespace) and build logs (lognamespace). Methods are
    generators driven by the @asynchronous decorator -- results are
    delivered by yielding them (presumably consumed by the decorator;
    decorator source not visible here).
    """

    def __init__(self, storage, key, namespace):
        self.storage = SecureStorage(storage)
        self.key = key
        self.logger = Logger()
        self.namespace = namespace_prefix + namespace
        self.dbnamespace = namespace_prefix + "apps"
        self.lognamespace = namespace_prefix + "logs"
        self.logger.info("UserDB has been initialized. Use namespace %s" %
                         self.namespace)

    @asynchronous
    def exists(self, name):
        """Yield True when user *name* is readable from storage, else False."""
        try:
            yield self.storage.read(self.namespace, name)
        except Exception as err:
            self.logger.error(str(err))
            yield False
        else:
            yield True

    @asynchronous
    def get(self, name):
        """Yield the raw stored record of user *name*."""
        yield self.storage.read(self.namespace, name)

    @asynchronous
    def create(self, info):
        """Create a user from an info dict with 'name' and 'password'.

        Yields True on success, False on storage error; raises when the
        user already exists.
        """
        user_info = dict()
        uid = uuid.uuid4().hex
        name = info['name']
        password = info['password']

        exists = yield self.exists(name)
        if exists:
            raise Exception("Already exists")
        # Store user uid
        user_info['uid'] = uid
        # Store username
        user_info['name'] = name
        # Crypt user passwd
        h = HMAC.new(uid)
        h.update(password)
        user_info['hash'] = h.hexdigest()
        try:
            yield self.storage.write(self.namespace, name, user_info, USER_TAG)
        except Exception as err:
            self.logger.error(str(err))
            yield False
        else:
            yield True

    @asynchronous
    def remove(self, name):
        """Best-effort removal of the user record, app info and logs.

        Each step is tried independently; failures are logged, not raised.
        """
        try:
            self.logger.info("Remove user %s" % name)
            yield self.storage.remove(self.namespace, name)
        except Exception as err:
            self.logger.error(repr(err))

        try:
            self.logger.info("Remove user %s application info" % name)
            yield self.storage.remove(self.dbnamespace, name)
        except Exception as err:
            self.logger.error(repr(err))

        try:
            self.logger.info("Remove user %s upload logs" % name)
            tags = LOG_TAG + [name]
            logkeys = yield self.storage.find(self.lognamespace, tags)
            self.logger.debug("Uploadlogs keys %s" % logkeys)
            for key in logkeys:
                yield self.storage.remove(self.lognamespace, key)
        except Exception as err:
            self.logger.error(repr(err))

    @asynchronous
    def login(self, name, password):
        """Yield the user record (uid stripped) if the password matches."""
        user_info = yield self.get(name)
        self.logger.info(str(user_info))
        h = HMAC.new(user_info['uid'])
        h.update(password)
        # NOTE(review): logs both digests at error level -- noisy and
        # arguably sensitive.
        self.logger.error("%s %s" % (h.hexdigest(), user_info['hash']))
        if (h.hexdigest() == user_info['hash']):
            user_info.pop('uid')
            yield user_info
        else:
            raise Exception("Invalid pair of login/password")

    @asynchronous
    def users(self):
        """Yield keys of all users (found by USER_TAG)."""
        yield self.storage.find(self.namespace, USER_TAG)

    @asynchronous
    def user_apps(self, user):
        """Yield the list of app names for *user* ([] on any error)."""
        apps = list()
        try:
            raw_apps = yield self.storage.read(self.dbnamespace, user)
            apps = msgpack.unpackb(raw_apps)
        except Exception as err:
            self.logger.error(repr(err))
        finally:
            yield apps

    @asynchronous
    def write_app_info(self, user, name):
        """Append app *name* to the user's app list (quasi-atomically)."""
        def handler(data):
            # Merge step: unpack current list, refuse duplicates, repack.
            apps = list()
            if data is None:
                apps = list()
            else:
                apps = msgpack.unpackb(data)

            if name in apps:
                self.logger.error("App %s already exists" % name)
                return None

            apps.append(name)
            return msgpack.packb(apps)

        reader = partial(self.storage.read, self.dbnamespace, user)
        writer = lambda result: self.storage.write(self.dbnamespace, user,
                                                   result, USER_TAG)
        yield self.quasi_atomic_write(reader, writer, handler)

    @asynchronous
    def write_buildlog(self, user, key, logdata):
        """Store *logdata* under *key*, tagged with the user name."""
        tags = LOG_TAG + [user]
        yield self.storage.write(self.lognamespace, key, logdata, tags)

    @asynchronous
    def read_buildlog(self, key):
        """Yield the stored build log for *key*."""
        yield self.storage.read(self.lognamespace, key)

    @asynchronous
    def list_buildlog(self, user):
        """Yield build-log keys for *user*, or all logs when user is falsy."""
        tags = [user] if user else LOG_TAG
        yield self.storage.find(self.lognamespace, tags)

    @asynchronous
    def quasi_atomic_write(self, reader, writer, handler):
        """Optimistic read-modify-write loop guarded by an MD5 checksum.

        Re-reads after handler() runs and writes only if the data did not
        change in between; otherwise retries. handler() returning None
        aborts the write.
        """
        while True:
            data = None
            summ = ""
            try:
                data = yield reader()
                summ = hashlib.md5(data).hexdigest()
            except Exception as err:
                self.logger.error(repr(err))

            result = handler(data)
            if result is None:
                break

            try:
                data = yield reader()
            except Exception as err:
                self.logger.error(repr(err))

            if data is None or summ == hashlib.md5(data).hexdigest():
                self.logger.info("MD5 is still valid. Do write")
                yield writer(result)
                break

            self.logger.info("MD5 mismatchs. Continue")
Example #13
0
#!/usr/bin/env python
import msgpack

from cocaine.worker import Worker
from cocaine.logging import Logger

__author__ = 'EvgenySafronov <*****@*****.**>'

# Module-wide cocaine logger instance.
log = Logger()


def chunker(request, response):
    """Stream back N 1KB-padded msgpack strings, then 'Done', and close.

    The first chunk read from the request carries the desired chunk
    count, either msgpack-encoded or as a plain numeric string.
    """
    chunks = yield request.read()
    try:
        chunks = int(msgpack.loads(chunks))
    except ValueError:
        # Not msgpack -- assume a plain numeric string.
        chunks = int(chunks)

    for num in xrange(chunks):
        # Each chunk: the number left-aligned, dash-padded to 1024 chars.
        response.write(msgpack.dumps('{0:-<1024}'.format(num)))
    response.write(msgpack.dumps('Done'))
    response.close()

# Register the single "chunkMe" event and hand control to the worker loop.
W = Worker()
W.run({'chunkMe': chunker})
Example #14
0
import logging

from cocaine.logging import Logger


# Single shared cocaine Logger that all CocaineHandler instances write to.
_cocaine_logger = Logger()


class CocaineHandler(logging.Handler):
    """logging.Handler that forwards stdlib records to the cocaine Logger."""

    def __init__(self, level=logging.NOTSET):
        # do not work in python 2.6.5 - logging.Handler is an old-style class
        # super(CocaineHandler, self).__init__(level=level)
        logging.Handler.__init__(self, level=level)

    def emit(self, record):
        """Route one record to the cocaine logger method of matching level."""
        try:
            name = record.levelname.lower()
            # due to bug in cocaine.Logger the method is "warn", not "warning"
            sink = getattr(_cocaine_logger, 'warn' if name == 'warning' else name)
        except AttributeError:
            # Unknown level: report it and fall back to info().
            _cocaine_logger.warn('No appropriate method for log records '
                'of level "{0}"'.format(record.levelname))
            sink = _cocaine_logger.info
        sink(self.format(record))


# Pre-created stdlib loggers for the application ('mm') and for tornado.
root_logger = logging.getLogger('mm')
tornado_logger = logging.getLogger('tornado')
Example #15
0
import os
import sys
import imp

import logging
from time import time

import msgpack
# pylint: disable=import-error
from cocaine.worker import Worker
from cocaine.logging import Logger
from cocaine.logging.hanlders import CocaineHandler
# pylint: enable=import-error

Logger().info("Initialize custom.py")
# Plugin lookup directory, overridable via the PLUGINS_PATH env var;
# prepended to sys.path so plugins import like normal modules.
PATH = os.environ.get('PLUGINS_PATH', '/usr/lib/yandex/combaine/custom')
sys.path.insert(0, PATH)
# Importable module suffixes (e.g. '.py', '.so') used for plugin detection.
EXTS = [ext for (ext, _, _) in imp.get_suffixes()]


def _is_plugin(candidate):
    """Return True when *candidate* is a loadable plugin file name."""
    base, ext = os.path.splitext(candidate)
    if base == "__init__":
        return False
    return ext in EXTS


def _is_candidate(name):
    return not name.startswith("_") and name[0].isupper()


class Custom(object):
Example #16
0
class MySqlDG(object):
    """MySQL-backed data grid: bulk-loads parsed rows into MEMORY tables
    living in /dev/shm and runs ad-hoc queries against them."""

    def __init__(self, **config):
        """Connect to the local MySQL server over a unix socket.

        Recognized config keys: MysqlSocket (socket path) and
        local_db_name (database to create/select, default 'COMBAINE').
        Raises Exception when connection or database setup fails.
        """
        self.logger = Logger()
        self.place = None       # cached "( name TYPE, ... )" column spec
        self.tablename = ''     # name of the last successfully loaded table
        try:
            # port = config.get('local_db_port', 3306)
            unix_socket = config.get('MysqlSocket',
                                     "/var/run/mysqld/mysqld.sock")
            self.dbname = config.get('local_db_name', 'COMBAINE')
            self.db = MySQLdb.connect(unix_socket=unix_socket, user='******', )
            self.cursor = self.db.cursor()
            self.cursor.execute('CREATE DATABASE IF NOT EXISTS %s' % self.dbname)
            self.db.commit()
            self.db.select_db(self.dbname)
        except Exception as err:
            self.logger.error('Error in init MySQLdb %s' % err)
            # Carry the original error text instead of raising the bare,
            # message-less Exception class.
            raise Exception('Error in init MySQLdb %s' % err)

    def putData(self, data, tablename):
        """Load *data* (iterable of dicts) into MEMORY table *tablename*.

        Row values are written 'GOPA'-separated to a temporary file in
        /dev/shm, then bulk-loaded via LOAD DATA INFILE. Returns True on
        success, False on failure; the temp file is removed either way.
        """
        fname = None  # bound early so cleanup in the except branch is safe
        try:
            # Sanitize the table name into a valid SQL identifier.
            tablename = tablename.replace('.', '_').replace('-', '_').replace('+', '_')
            line = None
            fname = '/dev/shm/%s-%i' % ('COMBAINE', random.randint(0, 65535))
            with open(fname, 'w') as table_file:
                for line in data:
                    table_file.write('GOPA'.join([str(x) for x in line.values()]) + '\n')

            # 'line' still holds the last row; falsy means no input at all.
            if not line:
                self.logger.info("Data for mysql is missed")
                os.remove(fname)
                return False

            # The file is closed here (with-block exited), so lstat sees
            # the full flushed size.
            self.logger.debug('Data written to a temporary file %s, size: %d bytes'
                              % (fname, os.lstat(fname).st_size))

            if not self._preparePlace(line):
                self.logger.error('Unsupported field types. Look at preparePlace()')
                return False

            # Identifiers cannot be bound as SQL parameters; tablename was
            # sanitized by the replace() calls above.
            self.cursor.execute('DROP TABLE IF EXISTS %s' % tablename)
            query = ("CREATE TABLE IF NOT EXISTS %(tablename)s %(struct)s "
                     "ENGINE = MEMORY DATA DIRECTORY='/dev/shm/'"
                     % {'tablename': tablename, 'struct': self.place})
            self.cursor.execute(query)
            self.db.commit()

            query = ("LOAD DATA INFILE '%(filename)s' INTO TABLE %(tablename)s "
                     "FIELDS TERMINATED BY 'GOPA'"
                     % {'filename': fname, 'tablename': tablename})
            self.cursor.execute(query)
            self.db.commit()
            if os.path.isfile(fname):
                os.remove(fname)
        except Exception as err:
            self.logger.error('Error in putData %s' % err)
            # The old code referenced table_file.name here, which is unbound
            # when the failure happens before/at open() -- use fname instead.
            if fname and os.path.isfile(fname):
                os.remove(fname)
            return False
        else:
            self.tablename = tablename
            return True

    def _preparePlace(self, example):
        """Build self.place, the column spec, from one example row (a dict).

        Maps python value types to SQL column types; unsupported types
        reset self.place and make this return False.
        """
        ftypes = {types.IntType: "INT",
                  types.UnicodeType: "VARCHAR(200)",
                  types.StringType: "VARCHAR(200)",
                  types.FloatType: "FLOAT"}
        try:
            self.place = '( %s )' % ','.join([" %s %s" % (field_name,
                                                          ftypes[type(field_value)])
                                             for field_name, field_value in example.items()])
        except Exception as err:
            self.logger.error('Error in preparePlace() %s' % err)
            self.place = None
            return False
        else:
            return True

    def perfomCustomQuery(self, query_string):
        """Execute *query_string* and return all fetched rows.

        (Name keeps the historical 'perfom' typo for API compatibility.)
        """
        self.logger.debug("Execute query: %s" % query_string)
        self.cursor.execute(query_string)
        _ret = self.cursor.fetchall()
        self.db.commit()
        return _ret

    def __del__(self):
        """Best-effort commit and close on garbage collection."""
        # __init__ may have raised before db/cursor were assigned; the old
        # code assumed both existed and could raise AttributeError here.
        db = getattr(self, 'db', None)
        if db:
            cursor = getattr(self, 'cursor', None)
            if cursor:
                cursor.close()
            db.commit()
            db.close()
Example #17
0
class Worker(object):
    """Cocaine worker: event-loop glue between cocaine-runtime and the
    user's event handlers (dispatched through a Sandbox)."""

    def __init__(self, init_args=sys.argv, disown_timeout=2, heartbeat_timeout=20):
        """Parse endpoint args, wire streams/timers, send handshake.

        NOTE(review): init_args=sys.argv is a mutable default argument,
        evaluated once at import time.
        """
        self._logger = Logger()
        self._init_endpoint(init_args)

        # session id -> Request object holding pending chunks
        self.sessions = dict()
        self.sandbox = Sandbox()

        self.loop = ev.Loop()

        # Disown fires if the runtime stops answering our heartbeats.
        self.disown_timer = ev.Timer(self.on_disown, disown_timeout, self.loop)
        self.heartbeat_timer = ev.Timer(self.on_heartbeat, heartbeat_timeout, self.loop)
        self.disown_timer.start()
        self.heartbeat_timer.start()

        self.pipe = Pipe(self.endpoint)
        self.loop.bind_on_fd(self.pipe.fileno())

        self.decoder = Decoder()
        self.decoder.bind(self.on_message)

        # Inbound bytes -> decoder -> on_message; outbound via w_stream.
        self.w_stream = WritableStream(self.loop, self.pipe)
        self.r_stream = ReadableStream(self.loop, self.pipe)
        self.r_stream.bind(self.decoder.decode)

        self.loop.register_read_event(self.r_stream._on_event, self.pipe.fileno())
        self._logger.debug("Worker with %s send handshake" % self.id)
        # Send both messages - to run timers properly. This messages will be sent
        # only after all initialization, so they have same purpose.
        self._send_handshake()
        self._send_heartbeat()

    def _init_endpoint(self, init_args):
        """Extract --uuid/--app/--endpoint values from the argument list."""
        try:
            self.id = init_args[init_args.index("--uuid") + 1]
            # NOTE(review): app_name is parsed but never stored or used.
            app_name = init_args[init_args.index("--app") + 1]
            self.endpoint = init_args[init_args.index("--endpoint") + 1]
        except Exception as err:
            self._logger.error("Wrong cmdline arguments: %s " % err)
            raise RuntimeError("Wrong cmdline arguments")

    def run(self, binds=None):
        """Register event->callback binds, then start the event loop."""
        if not binds:
            binds = {}
        for event, name in binds.iteritems():
            self.on(event, name)
        self.loop.run()

    def terminate(self, reason, msg):
        """Send RPC_TERMINATE to the runtime and stop the loop."""
        self.w_stream.write(Message(message.RPC_TERMINATE, 0, reason, msg).pack())
        self.loop.stop()

    # Event machine
    def on(self, event, callback):
        """Bind *callback* as the sandbox handler for *event*."""
        self.sandbox.on(event, callback)

    # Events
    def on_heartbeat(self):
        self._send_heartbeat()

    def on_message(self, args):
        """Dispatch one decoded RPC message from the runtime."""
        msg = Message.initialize(args)
        if msg is None:
            return
        elif msg.id == message.RPC_INVOKE:
            # New session: create request/stream pair and hand to sandbox.
            try:
                _request = Request()
                _stream = Stream(msg.session, self)
                self.sandbox.invoke(msg.event, _request, _stream)
                self.sessions[msg.session] = _request
            except Exception as err:
                self._logger.error("On invoke error: %s" % err)
                traceback.print_stack()

        elif msg.id == message.RPC_CHUNK:
            self._logger.debug("Receive chunk: %d" % msg.session)
            _session = self.sessions.get(msg.session, None)
            if _session is not None:
                try:
                    _session.push(msg.data)
                except Exception as err:
                    self._logger.error("On push error: %s" % str(err))

        elif msg.id == message.RPC_CHOKE:
            # Runtime signalled end-of-input: close and drop the session.
            self._logger.debug("Receive choke: %d" % msg.session)
            _session = self.sessions.get(msg.session, None)
            if _session is not None:
                _session.close()
                self.sessions.pop(msg.session)

        elif msg.id == message.RPC_HEARTBEAT:
            self._logger.debug("Receive heartbeat. Stop disown timer")
            self.disown_timer.stop()

        elif msg.id == message.RPC_TERMINATE:
            self._logger.debug("Receive terminate. Reason: %s, message: %s " % (msg.reason, msg.message))
            self.terminate(msg.reason, msg.message)

        elif msg.id == message.RPC_ERROR:
            _session = self.sessions.get(msg.session, None)
            if _session is not None:
                _session.error(RequestError(msg.message))

    def on_disown(self):
        """Runtime went silent: log it and stop the loop."""
        try:
            self._logger.error("Disowned")
        finally:
            self.loop.stop()

    # Private:
    def _send_handshake(self):
        self.w_stream.write(Message(message.RPC_HANDSHAKE, 0, self.id).pack())

    def _send_heartbeat(self):
        # The disown timer is re-armed on every heartbeat we send and
        # cancelled when the runtime's heartbeat reply arrives.
        self.disown_timer.start()
        self._logger.debug("Send heartbeat. Start disown timer")
        self.w_stream.write(Message(message.RPC_HEARTBEAT, 0).pack())

    def send_choke(self, session):
        self.w_stream.write(Message(message.RPC_CHOKE, session).pack())

    def send_chunk(self, session, data):
        self.w_stream.write(Message(message.RPC_CHUNK, session, data).pack())

    def send_error(self, session, code, msg):
        self.w_stream.write(Message(message.RPC_ERROR, session, code, msg).pack())
#! /usr/bin/env python

from hashlib import sha512

from cocaine.worker import Worker
from cocaine.logging import Logger
from cocaine.services import Service
from cocaine.exceptions import *
from cocaine.decorators import http, fs


# Module-wide cocaine logger and the service proxies used by the handlers.
L = Logger()

urlfetcher_service = Service("urlfetch")
storage_service = Service("storage")

import sys

def example(request, response):
    L.info("INITIALIZE FUNCTION")
    try:
        ls = yield storage_service.list()
        L.info("From storage: %s" % str(ls))
    except ServiceError as err:
        L.error("S: %s" % err)
        ls = yield storage_service.list("manifests")
        L.info("From storage: %s" % str(ls))

    chunk_from_cloud = yield request.read()
    L.info("from dealer: %s" % chunk_from_cloud)
    try:
class Request(Future):
    """One logical input stream of chunks for a single worker session.

    Chunks pushed before a reader binds are cached; a bound callback is
    one-shot (cleared before it fires), so each bind() delivers one chunk.
    """

    def __init__(self):
        self._logger = Logger()
        self.cache = list()  # chunks received before a callback was bound
        self._clbk = None   # Callback - on chunk
        self._errbk = None  # Errorback - translate error to handler
        self._errmsg = None  # Store message
        self._state = 1      # Status of stream (close/open); 1 = open

    def push(self, chunk):
        """Deliver *chunk* to the bound callback, or cache it."""
        if self._clbk is None:
            # If there is no attachment object, put chunk in the cache
            self._logger.debug("Cache chunk")
            self.cache.append(chunk)
        else:
            # Copy callback to temp, clear current callback and perform temp
            # Do it so because self._clbk may change,
            # while performing callback function.
            # Avoid double chunk sending to the task
            self._logger.debug("Send chunk to application")
            temp = self._clbk
            self._clbk = None
            temp(chunk)

    def error(self, errormsg):
        """Record an error; it is reported on the next bind()."""
        self._errmsg = errormsg

    def close(self):
        """Mark the stream closed; notify a still-waiting reader."""
        self._logger.debug("Close request")
        self._state = None
        if len(self.cache) == 0 and self._clbk is not None:
            self._logger.warn("Chunks are over,\
                                but the application requests them")
            if self._errbk is not None:
                self._logger.error("Throw error")
                self._errbk(RequestError("No chunks are available"))
            else:
                self._logger.error("No errorback. Can't throw error")

    def read(self):
        """Return a chain whose first element is this request (for yield)."""
        return chain.ChainFactory().then(lambda : self)

    def default_errorback(self, err):
        # Fallback used when bind() was given no errorback.
        self._logger.error("No errorback.\
                Can't throw error: %s" % str(self._errmsg))

    def bind(self, callback, errorback=None, on_done=None):
        """Attach a one-shot chunk callback (and optional errorback).

        Delivery priority: cached chunk, then pending error, then wait
        while the stream is open, else closed-stream error.
        """
        #self._logger.debug("Bind request")
        if len(self.cache) > 0:
            callback(self.cache.pop(0))
        elif self._errmsg is not None:
            if errorback is not None:
                errorback(self._errmsg)  # translate error into worker
            else:
                self.default_errorback(self._errmsg)
        elif self._state is not None:
            self._clbk = callback
            self._errbk = errorback or self.default_errorback
        else:
            # Stream closed by choke
            # Raise exception here because no chunks
            # from cocaine-runtime are available
            self._logger.warn("Chunks are over,\
                                but the application requests them")
            if errorback:
                errorback(RequestError("No chunks are available"))
Example #20
0
class UserDB(object):
    """Storage-backed registry of flow users, their apps and build logs.

    Backed by three storage namespaces (users, applications, upload
    logs); all values are msgpack-encoded via SecureStorage. Every
    method decorated with ``@asynchronous`` is a cocaine coroutine:
    a trailing bare ``yield <value>`` plays the role of a return value,
    it does not produce a stream of items.
    """

    def __init__(self, storage, key, namespace):
        # Wrap the raw storage so all blobs are transparently msgpacked.
        self.storage = SecureStorage(storage)
        # NOTE(review): `key` is stored but never used inside this
        # class -- presumably consumed by callers; confirm.
        self.key = key
        self.logger = Logger()
        # Per-deployment namespaces: users, application lists, upload logs.
        self.namespace = namespace_prefix + namespace
        self.dbnamespace = namespace_prefix + "apps"
        self.lognamespace = namespace_prefix + "logs"
        self.logger.info("UserDB has been initialized. Use namespace %s"
                         % self.namespace)

    @asynchronous
    def exists(self, name):
        """Yield True when user `name` can be read from storage.

        NOTE(review): any read failure (not only "missing key") is
        reported as False -- verify this is the intended contract.
        """
        try:
            yield self.storage.read(self.namespace, name)
        except Exception as err:
            self.logger.error(str(err))
            yield False
        else:
            yield True

    @asynchronous
    def get(self, name):
        """Yield the decoded user record stored under `name`."""
        yield self.storage.read(self.namespace, name)

    @asynchronous
    def create(self, info):
        """Create a new user from `info` (requires 'name' and 'password').

        Yields True on success, False when the storage write fails;
        raises when the user already exists.
        """
        user_info = dict()
        uid = uuid.uuid4().hex
        name = info['name']
        password = info['password']

        exists = yield self.exists(name)
        if exists:
            raise Exception("Already exists")
        # Store user uid
        user_info['uid'] = uid
        # Store username
        user_info['name'] = name
        # Derive the password hash with the random uid as the HMAC key;
        # ``login`` recomputes it the same way.
        h = HMAC.new(uid)
        h.update(password)
        user_info['hash'] = h.hexdigest()
        try:
            yield self.storage.write(self.namespace, name, user_info, USER_TAG)
        except Exception as err:
            self.logger.error(str(err))
            yield False
        else:
            yield True

    @asynchronous
    def remove(self, name):
        """Best-effort removal of a user and everything attached to it.

        Each step (user record, app list, upload logs) is attempted
        independently; failures are logged and do not abort the rest.
        """
        try:
            self.logger.info("Remove user %s" % name)
            yield self.storage.remove(self.namespace, name)
        except Exception as err:
            self.logger.error(repr(err))

        try:
            self.logger.info("Remove user %s application info" % name)
            yield self.storage.remove(self.dbnamespace, name)
        except Exception as err:
            self.logger.error(repr(err))

        try:
            self.logger.info("Remove user %s upload logs" % name)
            # Logs are found by the combination of the log tag and the
            # user's name (see write_buildlog).
            tags = LOG_TAG + [name]
            logkeys = yield self.storage.find(self.lognamespace, tags)
            self.logger.debug("Uploadlogs keys %s" % logkeys)
            for key in logkeys:
                yield self.storage.remove(self.lognamespace, key)
        except Exception as err:
            self.logger.error(repr(err))

    @asynchronous
    def login(self, name, password):
        """Check `password` for user `name`; yield the record sans 'uid'.

        Raises on a login/password mismatch (or when the user record
        cannot be read).
        """
        user_info = yield self.get(name)
        self.logger.info(str(user_info))
        h = HMAC.new(user_info['uid'])
        h.update(password)
        # NOTE(review): logging both hash values at *error* level looks
        # like leftover debugging and leaks hash material into the log.
        self.logger.error("%s %s" % (h.hexdigest(), user_info['hash']))
        if (h.hexdigest() == user_info['hash']):
            user_info.pop('uid')
            yield user_info
        else:
            raise Exception("Invalid pair of login/password")

    @asynchronous
    def users(self):
        """Yield the keys of all records tagged as flow users."""
        yield self.storage.find(self.namespace, USER_TAG)

    @asynchronous
    def user_apps(self, user):
        """Yield the list of application names owned by `user`.

        An unreadable or missing record yields an empty list.
        """
        apps = list()
        try:
            # The stored blob is msgpack-packed a second time (see
            # write_app_info), so one more unpackb is needed here.
            raw_apps = yield self.storage.read(self.dbnamespace, user)
            apps = msgpack.unpackb(raw_apps)
        except Exception as err:
            self.logger.error(repr(err))
        finally:
            yield apps

    @asynchronous
    def write_app_info(self, user, name):
        """Append application `name` to `user`'s app list if absent."""
        def handler(data):
            # Transform the stored (still msgpack-packed, because
            # SecureStorage adds one extra encode layer) app list:
            # add `name`, or return None to signal "nothing to write".
            apps = list()
            if data is None:
                apps = list()
            else:
                apps = msgpack.unpackb(data)

            if name in apps:
                self.logger.error("App %s already exists" % name)
                return None

            apps.append(name)
            return msgpack.packb(apps)

        reader = partial(self.storage.read, self.dbnamespace, user)
        writer = lambda result: self.storage.write(self.dbnamespace,
                                                   user,
                                                   result,
                                                   USER_TAG)
        yield self.quasi_atomic_write(reader, writer, handler)

    @asynchronous
    def write_buildlog(self, user, key, logdata):
        """Store `logdata` under `key`, tagged with the user's name."""
        tags = LOG_TAG + [user]
        yield self.storage.write(self.lognamespace, key, logdata, tags)

    @asynchronous
    def read_buildlog(self, key):
        """Yield the build log stored under `key`."""
        yield self.storage.read(self.lognamespace, key)

    @asynchronous
    def list_buildlog(self, user):
        """Yield build-log keys for `user`, or all logs when `user` is falsy."""
        tags = [user] if user else LOG_TAG
        yield self.storage.find(self.lognamespace, tags)

    @asynchronous
    def quasi_atomic_write(self, reader, writer, handler):
        """Read-modify-write loop with an MD5-based optimistic lock.

        Retries until the data read before and after ``handler`` agrees
        (same MD5 digest), then writes the handler's result. A handler
        result of None aborts the loop without writing.
        """
        while True:
            data = None
            summ = ""
            try:
                data = yield reader()
                summ = hashlib.md5(data).hexdigest()
            except Exception as err:
                self.logger.error(repr(err))

            result = handler(data)
            if result is None:
                break

            # Re-read to detect concurrent modification since the
            # first read above.
            try:
                data = yield reader()
            except Exception as err:
                self.logger.error(repr(err))

            if data is None or summ == hashlib.md5(data).hexdigest():
                self.logger.info("MD5 is still valid. Do write")
                yield writer(result)
                break

            self.logger.info("MD5 mismatchs. Continue")
class UrlFetcher():
    """HTTP proxy service: fetches URLs described by msgpack requests.

    Results are written back to `response` as a single tuple:
    (success_flag, body, http_code, headers_dict).
    """

    def __init__(self, io_loop):
        self.io_loop = io_loop
        self.http_client = AsyncHTTPClient()
        self.logger = Logger()

    @chain.source
    def perform_request(self, request, response, method):
        """Execute one GET/POST described by the `request` sequence.

        `request` is indexed by the constants in ``request_consts`` for
        the given method (URL, TIMEOUT, then optional BODY / COOKIES /
        HEADERS). Writes exactly one result tuple and closes `response`
        on every path, including errors.
        """
        try:
            constants = request_consts[method]

            url = request[constants.URL]
            timeout = request[constants.TIMEOUT]

            http_request = HTTPRequest(url=url, method=method)
            # Incoming timeout is in milliseconds; tornado expects seconds.
            http_request.request_timeout = float(timeout)/1000

            if method == 'POST':
                http_request.body = request[constants.BODY]

            # Adds cookies to the request (optional trailing parameter).
            params_num = len(request)
            if constants.COOKIES <= params_num - 1:
                cookies = request[constants.COOKIES]
                if len(cookies) > 0:
                    list_of_cookies = list('{0}={1}'.format(cookie, value) for cookie, value in cookies.iteritems())
                    cookies_str = '; '.join(list_of_cookies)

                    http_request.headers.add('Cookie', cookies_str)

            # Adds headers to the request (optional trailing parameter).
            if constants.HEADERS <= params_num - 1:
                for name, values_list in request[constants.HEADERS].iteritems():
                    for value in values_list:
                        http_request.headers.add(name, value)

            self.logger.info("Downloading {0}, headers {1}, method {2}".format(url, http_request.headers, method))
            http_response = yield self.http_client.fetch(http_request)

            response_headers = self._get_headers_from_response(http_response)
            response.write((True, http_response.body, http_response.code, response_headers,))

            response.close()
            self.logger.info("{0} has been successfuly downloaded".format(url))
        except HTTPError as e:
            # Non-2xx replies may still carry a useful body/headers.
            self.logger.info("Error ({0}) occured while downloading {1}".format(e.message, url))

            if e.response is not None:
                http_response = e.response
                response_headers = self._get_headers_from_response(http_response)
                response.write((False, http_response.body, http_response.code, response_headers,))
            else:
                response.write((False, '', e.code, {},))

            response.close()
        except socket.gaierror as e:
            # DNS resolution failure: report the errno as the code.
            self.logger.info("Error ({0}) occured while downloading {1}".format(e.message, url))
            response.write((False, '', e.errno, {},))
            response.close()
        except Exception as e:
            self.logger.error("Unhandled error ({0}) occured in perform_request, report about this problem "
                          "to httpclient service developers. Method is {1}, stacktrace is: {2}".format(
                                e.message, method, traceback.format_exc()))

            response.write((False, '', 0, {},))
            response.close()

    @chain.source
    def on_get_request(self, request, response):
        """Service handler: unpack a msgpacked GET request and perform it."""
        try:
            request_data_packed = yield request.read()
            request_data = msgpack.unpackb(request_data_packed)

            yield self.perform_request(request_data, response, 'GET')
        except StopIteration:
            pass
        except Exception as e:
            self.logger.error("Unhandled error ({0}) occured in on_get_request, report about this problem "
                              "to httpclient service developers. Stacktrace is: {1}".format(e.message, traceback.format_exc()))
            response.write((False, '', 0, {},))
            response.close()

    @chain.source
    def on_post_request(self, request, response):
        """Service handler: unpack a msgpacked POST request and perform it."""
        try:
            request_data_packed = yield request.read()
            request_data = msgpack.unpackb(request_data_packed)

            yield self.perform_request(request_data, response, 'POST')
        except StopIteration:
            pass
        except Exception as e:
            self.logger.error("Unhandled error ({0}) occured in on_post_request, report about this problem "
                              "to httpclient service developers. Stacktrace is: {1}".format(e.message, traceback.format_exc()))
            response.write((False, '', 0, {},))
            response.close()

    def _get_headers_from_response(self, http_response):
        """Collect response headers into a dict of name -> list of values."""
        response_headers = {}
        for header_tuple in http_response.headers.items():
            name = header_tuple[0]
            value = header_tuple[1]
            if not name in response_headers:
                response_headers[name] = []

            response_headers[name].append(value)

        return response_headers
Example #22
0
from functools import partial

from Crypto.Hash import HMAC
import msgpack

from cocaine.asio.engine import asynchronous
from cocaine.logging import Logger

# Prefix applied to every storage namespace used by the flow services.
namespace_prefix = "flow-users@"
# Storage tags used to find user records and upload logs respectively.
USER_TAG = ["FLOW_USER"]
LOG_TAG = ["FLOW_UPLOAD_LOG"]

# All stored blobs are msgpack-(de)serialized with these helpers.
encoder = msgpack.packb
decoder = msgpack.unpackb

# Module-level cocaine logger shared by this module.
logger = Logger()


class SecureStorage(object):
    """Thin wrapper over a cocaine storage that msgpack-encodes values.

    Both methods are ``@asynchronous`` coroutines; the trailing bare
    ``yield <value>`` acts as the coroutine's return value.
    """

    def __init__(self, storage):
        # The underlying cocaine storage service.
        self.storage = storage

    @asynchronous
    def write(self, namespace, key, blob, tags):
        """Serialize `blob` with msgpack and store it under `key`."""
        yield self.storage.write(namespace, key, encoder(blob), tags)

    @asynchronous
    def read(self, namespace, key):
        """Read `key` and yield the msgpack-decoded value."""
        res = yield self.storage.read(namespace, key)
        yield decoder(res)
 def __init__(self, io_loop):
     self.io_loop = io_loop
     self.http_client = AsyncHTTPClient()
     self.logger = Logger()
Example #24
0
# (at your option) any later version.
#
# Combaine is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#

import logging

from cocaine.logging import Logger
from cocaine.logging.hanlders import CocaineHandler

# Cocaine logger used to signal that this module was imported.
# NOTE(review): error level for a plain init notice looks like a way to
# make it always visible -- confirm it is intentional.
Log = Logger()
Log.error("INITIALIZE")

# Route the standard `logging` tree "combaine" into cocaine via
# CocaineHandler; the formatter expects a `tid` attribute supplied
# per-record (see get_logger_adapter below).
l = logging.getLogger("combaine")
l.setLevel(logging.DEBUG)
ch = CocaineHandler()
formatter = logging.Formatter("%(tid)s %(message)s")
ch.setFormatter(formatter)
ch.setLevel(logging.DEBUG)
l.addHandler(ch)


def get_logger_adapter(tid):
    """Return a LoggerAdapter that injects `tid` into every log record."""
    extra = {"tid": tid}
    return logging.LoggerAdapter(l, extra)
Example #25
0
class Worker(object):

    def __init__(self, init_args=sys.argv):
        """Wire up the worker: endpoint, timers, pipe, streams, handshake.

        `init_args` is the command line (defaults to sys.argv) and must
        carry the --uuid, --app and --endpoint options.
        """
        self._logger = Logger()
        # Sets self.id and self.endpoint; raises RuntimeError on bad args.
        self._init_endpoint(init_args)

        # session id -> Request mapping for in-flight invocations.
        self.sessions = dict()
        self.sandbox = Sandbox()

        self.service = ev.Service()

        # Disown timer (interval 2) guards heartbeat acknowledgements;
        # heartbeats go out every 5 (units presumably seconds -- confirm
        # ev.Timer semantics).
        self.disown_timer = ev.Timer(self.on_disown, 2, self.service)
        self.heartbeat_timer = ev.Timer(self.on_heartbeat, 5, self.service)
        self.disown_timer.start()
        self.heartbeat_timer.start()

        # Pipe to the cocaine runtime at the given endpoint.
        self.pipe = Pipe(self.endpoint)
        self.service.bind_on_fd(self.pipe.fileno())

        # Incoming bytes -> decoder -> on_message dispatch.
        self.decoder = Decoder()
        self.decoder.bind(self.on_message)

        self.w_stream = WritableStream(self.service, self.pipe)
        self.r_stream = ReadableStream(self.service, self.pipe)
        self.r_stream.bind(self.decoder.decode)


        self.service.register_read_event(self.r_stream._on_event, self.pipe.fileno())
        self._logger.debug("Worker with %s send handshake" % self.id)
        self._send_handshake()

    def _init_endpoint(self, init_args):
        try:
            self.id = init_args[init_args.index("--uuid") + 1]
            app_name = init_args[init_args.index("--app") + 1]
            self.endpoint = init_args[init_args.index("--endpoint") + 1]
        except Exception as err:
            self._logger.error("Wrong cmdline argumensts: %s " % err)
            raise RuntimeError("Wrong cmdline arguments")

    def run(self, binds=None):
        """Bind event handlers from `binds` and start the event loop.

        `binds` maps event names to callbacks. BUG FIX: the default was
        a mutable ``{}`` (shared-default pitfall); None is used instead.
        """
        if binds is None:
            binds = {}
        # ``items`` works on both Python 2 and 3, unlike ``iteritems``.
        for event, name in binds.items():
            self.on(event, name)
        self.service.run()

    def terminate(self, reason, msg):
        """Send RPC_TERMINATE to the runtime, then stop the event loop."""
        packed = Message(message.RPC_TERMINATE, 0, reason, msg).pack()
        self.w_stream.write(packed)
        self.service.stop()

    # Event machine
    # Event machine
    def on(self, event, callback):
        """Register `callback` for `event`, delegating to the sandbox."""
        sandbox = self.sandbox
        sandbox.on(event, callback)

    # Events
    def on_heartbeat(self):
        """Timer callback: emit a heartbeat message to the runtime."""
        self._send_heartbeat()

    def on_message(self, args):
        """Dispatch one decoded RPC message from the runtime.

        Handles invoke/chunk/choke/heartbeat/terminate/error messages;
        malformed messages (``Message.initialize`` returns None) and
        unknown ids are ignored.
        """
        msg = Message.initialize(args)
        if msg is None:
            return
        elif msg.id == message.RPC_INVOKE: #PROTOCOL_LIST.index("rpc::invoke"):
            try:
                # New session: create a request/stream pair and hand the
                # event to the sandbox; remember the request so chunks
                # can be routed to it.
                _request = Request()
                _stream = Stream(msg.session, self)
                self.sandbox.invoke(msg.event, _request, _stream)
                self.sessions[msg.session] = _request
            except Exception as err:
                self._logger.error("On invoke error: %s" % err)

        elif msg.id == message.RPC_CHUNK: #PROTOCOL_LIST.index("rpc::chunk"):
            self._logger.debug("Receive chunk: %d" % msg.session)
            _session = self.sessions.get(msg.session, None)
            if _session is not None:
                try:
                    _session.push(msg.data)
                except Exception as err:
                    self._logger.error("On push error: %s" % str(err))

        elif msg.id == message.RPC_CHOKE: #PROTOCOL_LIST.index("rpc::choke"):
            # Choke closes the session's chunk stream for good and
            # forgets the session.
            self._logger.debug("Receive choke: %d" % msg.session)
            _session = self.sessions.get(msg.session, None)
            if _session is not None:
                _session.close()
                self.sessions.pop(msg.session)

        elif msg.id == message.RPC_HEARTBEAT: #PROTOCOL_LIST.index("rpc::heartbeat"):
            # Runtime acknowledged us: cancel the pending disown.
            self.disown_timer.stop()

        elif msg.id == message.RPC_TERMINATE: #PROTOCOL_LIST.index("rpc::terminate"):
            self._logger.debug("Receive terminate. Reason: %s, message: %s " % (msg.reason, msg.message))
            self.terminate(msg.reason, msg.message)

        elif msg.id == message.RPC_ERROR: #PROTOCOL_LIST.index("rpc::error"):
            # Deliver the runtime-side error to the session's request.
            _session = self.sessions.get(msg.session, None)
            if _session is not None:
                _session.error(RequestError(msg.message))

    def on_disown(self):
        """Heartbeat was not acknowledged in time: log and stop the loop."""
        self._logger.error("Disowned")
        self.service.stop()

    # Private:
    def _send_handshake(self):
        """Introduce this worker to the runtime by sending its uuid."""
        handshake = Message(message.RPC_HANDSHAKE, 0, self.id)
        self.w_stream.write(handshake.pack())

    def _send_heartbeat(self):
        """Re-arm the disown timer, then send a heartbeat message."""
        self.disown_timer.start()
        heartbeat = Message(message.RPC_HEARTBEAT, 0)
        self.w_stream.write(heartbeat.pack())

    def send_choke(self, session):
        """Tell the runtime that `session`'s chunk stream is finished."""
        choke = Message(message.RPC_CHOKE, session)
        self.w_stream.write(choke.pack())

    def send_chunk(self, session, data):
        """Push one data chunk for `session` to the runtime."""
        chunk = Message(message.RPC_CHUNK, session, data)
        self.w_stream.write(chunk.pack())