Example 1
from flow.configuration.settings.injector import setting
from flow.util.exit import exit_process
from flow import exit_codes
from injector import inject
from twisted.internet import defer

import flow.interfaces
import logging
import time


LOG = logging.getLogger(__name__)


@inject(broker=flow.interfaces.IBroker,
        binding_config=setting('bindings'))
class ConfigureRabbitMQCommand(CommandBase):
    injector_modules = [
        BrokerConfiguration,
    ]


    @staticmethod
    def annotate_parser(parser):
        pass


    def _execute(self, parsed_arguments):
        exchanges, queues, bindings = self._parse_config()

        LOG.debug("Parsed config")
Example 2
from flow.configuration.settings.injector import setting
from flow.shell_command.lsf.resource_manager import LSFResourceManager
from pythonlsf import lsf
from twisted.python.procutils import which

import injector
import logging
import os


LOG = logging.getLogger(__name__)


@injector.inject(
    resource_manager=LSFResourceManager,
    option_manager=LSFOptionManager,
    pre_exec=setting('shell_command.lsf.pre_exec'),
    post_exec=setting('shell_command.lsf.post_exec'))
class LSFRequestBuilder(object):
    def __init__(self):
        if self.pre_exec:
            self.pre_exec_command = _localize_cmd(self.pre_exec)
        else:
            self.pre_exec_command = None

        if self.post_exec:
            self.post_exec_command = _localize_cmd(self.post_exec)
        else:
            self.post_exec_command = None

    def construct_request(self, executor_data):
        request = create_empty_request()
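
(The excerpt ends at the first line of construct_request.) _localize_cmd,
create_empty_request, and the import of LSFOptionManager fall outside this
excerpt. Given the which import above, _localize_cmd most likely resolves a
command's executable to an absolute path before LSF runs it on a remote host.
A plausible sketch, assuming pre_exec/post_exec are argv-style lists; this is
an assumption, not the project's actual helper:

def _localize_cmd(cmd):
    # Hypothetical sketch: swap the executable name for the first match on
    # PATH so the remote LSF host runs the same binary.  which() returns a
    # list of candidate paths.
    matches = which(cmd[0])
    if not matches:
        raise RuntimeError('%s not found on PATH' % cmd[0])
    return [matches[0]] + list(cmd[1:])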
Example 3
from flow.configuration.settings.injector import setting
from flow.shell_command.messages import ShellCommandSubmitMessage
from injector import inject

import flow.interfaces
import logging

# IShellCommand (the base class below) comes from another flow module whose
# import is not shown in this excerpt.


LOG = logging.getLogger(__name__)


@inject(broker=flow.interfaces.IBroker)
class ShellCommandServiceInterface(IShellCommand):
    def submit(self, **kwargs):
        message = ShellCommandSubmitMessage(**kwargs)

        return self.broker.publish(self.exchange,
                self.submit_routing_key, message)


@inject(exchange=setting('shell_command.fork.exchange'),
        submit_routing_key=setting('shell_command.fork.submit_routing_key'))
class ForkShellCommandServiceInterface(ShellCommandServiceInterface):
    pass


@inject(exchange=setting('shell_command.lsf.exchange'),
        submit_routing_key=setting('shell_command.lsf.submit_routing_key'))
class LSFShellCommandServiceInterface(ShellCommandServiceInterface):
    def submit(self, **kwargs):
        # XXX Additional message validation:
        #       executor_data must contain response places, net key, etc.
        #       executor_data must not contain working_directory (easy mistake)

        return ShellCommandServiceInterface.submit(self, **kwargs)
Example 4
        else:
            self.flag = None
        self.type = TYPE_MAP[type]
        self.options_name = 'options%s' % option_set

    def set_option(self, request, value):
        cast_value = self.type(value)
        setattr(request, self.name, cast_value)

        if self.flag is not None:
            options = getattr(request, self.options_name)
            setattr(request, self.options_name, options | int(self.flag))


@injector.inject(
    option_definitions=setting('shell_command.lsf.available_options'),
    default_options=setting('shell_command.lsf.default_options'))
class LSFOptionManager(object):
    def __init__(self):
        self.available_options = factory.build_objects(
                self.option_definitions, MODULE, 'LSFOption')

    def set_default_options(self, request):
        for option, value in self.default_options.iteritems():
            self.available_options[option].set_option(request, value)

    def set_options(self, request, executor_data):
        self.set_default_options(request)

        lsf_options = executor_data.get('lsf_options', {})
Example 5
from flow.configuration.settings.injector import setting
from flow.shell_command.messages import ShellCommandSubmitMessage
from flow.shell_command.monitor import ExecutorMonitor
from flow.util import environment as env_util
from injector import inject
from twisted.internet import defer, reactor

import logging
import os
import sys
import twisted.python.procutils


LOG = logging.getLogger(__name__)


@inject(default_environment=setting('shell_command.default_environment', {}),
        mandatory_environment=setting(
            'shell_command.mandatory_environment', {}))
class ShellCommandSubmitMessageHandler(Handler):
    message_class = ShellCommandSubmitMessage

    @property
    def executable(self):
        # Resolve the executor script: first relative to the current working
        # directory, then next to the running interpreter, then on PATH.
        abspath = os.path.abspath(self.executable_name)
        if is_executable(abspath):
            return abspath

        beside_interpreter = os.path.join(os.path.dirname(sys.executable),
                self.executable_name)
        if is_executable(beside_interpreter):
            return beside_interpreter

        return twisted.python.procutils.which(self.executable_name)[0]
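
Handler and is_executable are defined outside this excerpt. is_executable
presumably just checks for an existing file with the execute bit set; a
minimal stand-in under that assumption:

def is_executable(path):
    # Minimal sketch: a regular file the current user may execute.
    return os.path.isfile(path) and os.access(path, os.X_OK)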
Example 6

@inject(storage=WorkflowHistorianStorage)
class HistorianHandlerBase(Handler):
    def _handle_message(self, message):
        try:
            self._handle_historian_message(message)
            return defer.succeed(None)

        except EXIT_ON:
            LOG.exception("This historian cannot handle messages anymore, "
                    "because it lost access to Oracle... exiting.")
            exit_process(exit_codes.EXECUTE_FAILURE)


@inject(queue_name=setting('workflow.historian.update_queue'))
class HistorianUpdateHandler(HistorianHandlerBase):
    message_class = messages.UpdateMessage

    def _handle_historian_message(self, message):
        message_dict = self._get_message_dict(message)
        LOG.debug("Updating [net_key='%s', operation_id='%s']: %r",
                message.operation_data['net_key'],
                message.operation_data['operation_id'], message_dict)
        self.storage.update(message_dict)

    def _get_message_dict(self, message):
        message_dict = message.to_dict()

        message_dict['status'] = Status(message_dict['status'])
Example 7
from flow.configuration.settings.injector import setting
from flow.interfaces import IServiceLocator
from flow.shell_command.handler_base import ShellCommandSubmitMessageHandler
from injector import inject
from twisted.internet import defer

import os
import socket


@inject(queue_name=setting('shell_command.fork.queue'),
        exchange=setting('shell_command.fork.exchange'),
        response_routing_key=setting('shell_command.fork.response_routing_key'),
        service_interfaces=IServiceLocator)
class ForkShellCommandMessageHandler(ShellCommandSubmitMessageHandler):
    executable_name = 'flow-fork-shell-command-executor'

    def on_job_id_success(self, job_id, callback_data=None,
            job_id_handled=None):
        deferreds = []
        dispatch_data = {'job_id': job_id}
        deferreds.append(self.send_message('msg: dispatch_success',
            callback_data, token_data=dispatch_data))

        execute_data = {'hostname': socket.gethostname()}
        deferreds.append(self.send_message('msg: execute_begin',
            callback_data, token_data=execute_data))

        dlist = defer.gatherResults(deferreds, consumeErrors=True)

        dlist.addCallbacks(job_id_handled.callback, job_id_handled.errback)
Example 8
LOG = logging.getLogger(__name__)


def _build_command_cmdline(method, action_id, inputs_file, outputs_file):
    return ["command", method, action_id, inputs_file.name, outputs_file.name]


def _build_event_cmdline(method, action_id, inputs_file, outputs_file):
    return ["event", method, action_id, outputs_file.name]


CMDLINE_BUILDERS = {
    "command": _build_command_cmdline,
    "event": _build_event_cmdline,
}


@inject(perl_wrapper=setting("workflow.perl_wrapper"),
        storage=flow.interfaces.IStorage)
class WorkflowWrapperCommand(CommandBase):
    injector_modules = [RedisConfiguration]

    @staticmethod
    def annotate_parser(parser):
        parser.add_argument("--method", required=True, help="shortcut or execute")
        parser.add_argument("--action-type", required=True, help="event or command")
        parser.add_argument("--action-id", required=True, help="event_id or perl_class")

        parser.add_argument("--net-key", required=True, help="used to look up inputs")
        parser.add_argument("--operation-id", type=int, required=True, help="used to look up inputs")

        parser.add_argument("--parallel-id", default="[]", help="used to look up inputs")

    def _execute(self, parsed_arguments):
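
(The excerpt ends at the _execute signature.) The two builders above differ
only in whether the inputs file appears on the command line: "command" actions
get both files, "event" actions only the outputs file. A quick illustration
with invented action values (the help text suggests a method such as "execute"
and an action id that is either a perl_class or an event_id):

import tempfile

inputs_file = tempfile.NamedTemporaryFile()
outputs_file = tempfile.NamedTemporaryFile()

CMDLINE_BUILDERS["command"]("execute", "Some::Perl::Class",
        inputs_file, outputs_file)
#   -> ["command", "execute", "Some::Perl::Class",
#       inputs_file.name, outputs_file.name]
CMDLINE_BUILDERS["event"]("execute", "some_event_id",
        inputs_file, outputs_file)
#   -> ["event", "execute", "some_event_id", outputs_file.name]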
Example 9
from flow.configuration.settings.injector import setting
from injector import inject
from pika.adapters import twisted_connection
from twisted.internet import reactor, defer, protocol
from flow.exit_codes import (EXECUTE_SYSTEM_FAILURE,
        EXECUTE_ERROR,
        EXECUTE_SERVICE_UNAVAILABLE)
from flow.util.defer import add_callback_and_default_errback

import logging
import pika


LOG = logging.getLogger(__name__)

@inject(
    hostname=setting('amqp.hostname'),
    port=setting('amqp.port'),
    virtual_host=setting('amqp.vhost'),
    retry_delay=setting('amqp.retry_delay'),
    connection_attempts=setting('amqp.connection_attempts'),
    prefetch_count=setting('amqp.prefetch_count'),
    heartbeat_interval=setting('amqp.heartbeat_interval'),
)
class ConnectionParams(object):
    @property
    def pika_params(self):
        return pika.ConnectionParameters(host=self.hostname, port=self.port,
                virtual_host=self.virtual_host,
                heartbeat_interval=self.heartbeat_interval)

DISCONNECTED = 'disconnected'
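
The reactor/protocol/twisted_connection imports suggest the AMQP connection
itself is opened with pika's Twisted adapter, which is not shown in this
excerpt. The sketch below mirrors pika's documented Twisted usage, with an
assumed ConnectionParams instance named params; it is an illustration, not
flow's actual connection code:

cc = protocol.ClientCreator(reactor,
        twisted_connection.TwistedProtocolConnection, params.pika_params)
connect_deferred = cc.connectTCP(params.hostname, params.port)
# TwistedProtocolConnection exposes a 'ready' deferred that fires once the
# AMQP connection is fully set up.
connect_deferred.addCallback(lambda connection: connection.ready)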
Example 10
"""


TABLES = namedtuple('Tables', ['historian', 'instance', 'execution'])
SEQUENCES = namedtuple('Sequences', ['instance', 'execution'])
STATEMENTS = namedtuple('Statements', STATEMENTS_DICT.keys())

def on_oracle_connect(connection, record):
    cursor = connection.cursor()
    cursor.execute("alter session set NLS_DATE_FORMAT = "
            "'YYYY-MM-DD HH24:MI:SS'")
    cursor.execute("alter session set NLS_TIMESTAMP_FORMAT = "
            "'YYYY-MM-DD HH24:MI:SSXFF'")
    cursor.close()

@inject(connection_string=setting('workflow.historian.connection_string'),
        owner=setting('workflow.historian.owner'))
class WorkflowHistorianStorage(object):
    def __init__(self):
        self.statements = STATEMENTS(**{k:v % self.owner
                for k, v in STATEMENTS_DICT.items()})
        self.tables = TABLES(historian='%s.workflow_historian' % self.owner,
                instance='%s.workflow_instance' % self.owner,
                execution='%s.workflow_instance_execution' % self.owner)
        self.sequences = SEQUENCES(
                instance='%s.workflow_instance_seq' % self.owner,
                execution='%s.workflow_execution_seq' % self.owner)

        self.engine = create_engine(self.connection_string,
                case_sensitive=False, poolclass=StaticPool)
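
(create_engine, StaticPool, namedtuple, and STATEMENTS_DICT are imported or
defined above the excerpt window.) on_oracle_connect sets the Oracle session's
date formats, but the excerpt does not show it being attached to the engine.
With SQLAlchemy that is normally done through the event system; a sketch of
the likely wiring, with a hypothetical helper name:

from sqlalchemy import event

def _register_oracle_session_formats(engine):
    # SQLAlchemy invokes the listener with (dbapi_connection,
    # connection_record), which matches on_oracle_connect's signature above.
    event.listen(engine, 'connect', on_oracle_connect)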
Example 11
from flow.configuration.settings.injector import setting
from flow.orchestrator.messages import CreateTokenMessage, NotifyPlaceMessage
from flow.orchestrator.messages import NotifyTransitionMessage
from injector import inject

import flow.interfaces
import logging


LOG = logging.getLogger(__name__)


@inject(broker=flow.interfaces.IBroker,
        create_token_exchange=
            setting('orchestrator.create_token_exchange'),
        create_token_routing_key=
            setting('orchestrator.create_token_routing_key'),
        notify_place_exchange=
            setting('orchestrator.notify_place_exchange'),
        notify_place_routing_key=
            setting('orchestrator.notify_place_routing_key'),
        notify_transition_exchange=
            setting('orchestrator.notify_transition_exchange'),
        notify_transition_routing_key=
            setting('orchestrator.notify_transition_routing_key'))
class OrchestratorServiceInterface(flow.interfaces.IOrchestrator):
    def create_token(self, net_key, place_idx,
            color, color_group_idx, data=None):
        message = CreateTokenMessage(net_key=net_key, place_idx=place_idx,
                color=color, color_group_idx=color_group_idx, data=data)
        return self.broker.publish(self.create_token_exchange,
Example 12
from collections import deque, defaultdict
from flow.configuration.settings.injector import setting
from injector import inject
from twisted.internet import defer, reactor

import flow.interfaces
import logging


LOG = logging.getLogger(__name__)


@inject(bindings=setting('bindings'))
class LocalBroker(flow.interfaces.IBroker):
    def __init__(self):
        self.bindings = _transform_bindings(self.bindings)
        self.queue = deque()
        self.handlers = {}

    def publish(self, exchange_name, routing_key, message):
        encoded_message = message.encode()
        return self.raw_publish(exchange_name, routing_key, encoded_message)

    def register_handler(self, handler):
        LOG.debug('Registering handler on %s', handler.queue_name)
        self.handlers[handler.queue_name] = handler

    def raw_publish(self, exchange, routing_key, encoded_message):
        LOG.debug('Putting message for exchange (%s), '
                  'routing_key (%s) in queue: %s',
                  exchange, routing_key, encoded_message)
Example 13
from flow.configuration.settings.injector import setting
from flow.shell_command import factory
from flow.shell_command import resource_types
from flow.shell_command.lsf import resource

import injector


@injector.inject(
    resource_type_definitions=setting('shell_command.resource_types'),
    resource_definitions=setting('shell_command.lsf.supported_resources'))
class LSFResourceManager(object):
    def __init__(self):
        self.resource_types = resource_types.make_resource_types(
                self.resource_type_definitions)
        self.available_resources = {}
        self.available_resources['limit'] = factory.build_objects(
                self.resource_definitions.get('limit', {}), resource)
        self.available_resources['request'] = factory.build_objects(
                self.resource_definitions.get('request', {}), resource)
        self.available_resources['reserve'] = factory.build_objects(
                self.resource_definitions.get('reserve', {}), resource)

    def set_resources(self, request, executor_data):
        resources = resource_types.make_all_resource_objects(
                executor_data.get('resources', {}), self.resource_types)
        resource.set_all_resources(request, resources, self.available_resources)
Example 14
from flow.configuration.settings.injector import setting
from flow.handler import Handler
from flow.orchestrator.messages import CreateTokenMessage, NotifyPlaceMessage
from flow.orchestrator.messages import NotifyTransitionMessage
from flow.redisom import get_object
from injector import inject

import flow.interfaces
import logging


LOG = logging.getLogger(__name__)

@inject(redis=flow.interfaces.IStorage,
        service_interfaces=flow.interfaces.IServiceLocator,
        queue_name=setting('orchestrator.create_token_queue'))
class PetriCreateTokenHandler(Handler):
    message_class = CreateTokenMessage

    def _handle_message(self, message):
        net = get_object(self.redis, message.net_key)

        create_token_kwargs = getattr(message, 'create_token_kwargs', {})

        return net.create_put_notify(message.place_idx,
                self.service_interfaces,
                color=message.color,
                color_group_idx=message.color_group_idx,
                data=getattr(message, 'data', {}))

Example 15
from flow.configuration.settings.injector import setting

import injector
import json
import re
import requests


@injector.inject(bindings=setting('bindings'),
        hostname=setting('amqp.hostname'),
        port=setting('amqp.api_port'), virtual_host=setting('amqp.vhost'))
class RabbitMQAPI(object):
    @property
    def auth(self):
        return ('guest', 'guest')

    @property
    def parameters(self):
        return {
            'hostname': self.hostname,
            'port': self.port,
            'virtual_host': self.virtual_host,
        }

    def request_string(self, api_substring):
        template = 'http://%(hostname)s:%(port)s/api/' + api_substring
        return template % self.parameters


    def vhost_status(self):
        response = requests.get(self.request_string('vhosts/%(virtual_host)s'),
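
The excerpt is cut off in the middle of the requests.get call above. For
reference, request_string only interpolates the injected parameters into the
URL template; rerunning that logic with invented values (15672 is RabbitMQ's
default management-API port, presumably what 'amqp.api_port' names) gives:

# Stand-alone rerun of the request_string logic with invented parameters.
parameters = {'hostname': 'localhost', 'port': 15672,
        'virtual_host': 'workflow'}
template = 'http://%(hostname)s:%(port)s/api/' + 'vhosts/%(virtual_host)s'
print(template % parameters)
#   -> http://localhost:15672/api/vhosts/workflow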