Ejemplo n.º 1
0
class Migrate_0_13_x_to_1_1_0(object):
    """
    Handles migration of messaging setup from 0.13.x to 1.1.

    Deletes queues that became obsolete in 1.1 because of name changes or
    changed durability properties.
    """

    # All these queues are either not required due to name
    # changes or changes in durability properties.
    OLD_QS = [
        # Name changed in 1.1
        reactor.get_trigger_cud_queue('st2.trigger.watch.timers', routing_key='#'),
        # Split to multiple queues in 1.1
        reactor.get_trigger_cud_queue('st2.trigger.watch.sensorwrapper', routing_key='#'),
        # Name changed in 1.1
        reactor.get_trigger_cud_queue('st2.trigger.watch.webhooks', routing_key='#')
    ]

    def migrate(self):
        """Run the migration (currently just the old-queue cleanup)."""
        self._cleanup_old_queues()

    def _cleanup_old_queues(self):
        # Best-effort cleanup: a queue that is already gone or cannot be
        # deleted must not abort the migration, so each failure is reported
        # and the loop continues.
        with Connection(transport_utils.get_messaging_urls()) as connection:
            for queue in self.OLD_QS:
                bound_queue = queue(connection.default_channel)
                try:
                    bound_queue.delete()
                except Exception:
                    # Narrowed from a bare ``except:`` so SystemExit and
                    # KeyboardInterrupt still propagate.
                    print('Failed to delete %s.' % queue.name)
                    traceback.print_exc()
Ejemplo n.º 2
0
 def _get_queue(queue_suffix):
     """
     Build the trigger CUD queue used by the TriggerWatcher.

     :param queue_suffix: Suffix appended to the queue name. When falsy, a
                          random 10-hex-digit suffix is generated instead.
     :return: Queue object from ``reactor.get_trigger_cud_queue``.
     """
     if not queue_suffix:
         # Pick the last 10 hex digits of a uuid4 -- arbitrary but unique
         # enough for the TriggerWatcher. (The original generated a second
         # uuid just to slice it; one uuid is sufficient.)
         queue_suffix = uuid.uuid4().hex[-10:]
     queue_name = 'st2.trigger.watch.%s' % queue_suffix
     return reactor.get_trigger_cud_queue(queue_name, routing_key='#')
Ejemplo n.º 3
0
 def _get_queue(queue_suffix):
     """
     Build the trigger CUD queue used by the TriggerWatcher.

     :param queue_suffix: Suffix appended to the queue name. When falsy, a
                          random 10-hex-digit suffix is generated instead.
     :return: Queue object from ``reactor.get_trigger_cud_queue``.
     """
     if not queue_suffix:
         # Pick the last 10 hex digits of a uuid4 -- arbitrary but unique
         # enough for the TriggerWatcher. (The original generated a second
         # uuid just to slice it; one uuid is sufficient.)
         queue_suffix = uuid.uuid4().hex[-10:]
     queue_name = 'st2.trigger.watch.%s' % queue_suffix
     return reactor.get_trigger_cud_queue(queue_name, routing_key='#')
Ejemplo n.º 4
0
 def _get_queue(queue_suffix, exclusive):
     """
     Return the TriggerWatcher trigger CUD queue.

     :param queue_suffix: Suffix appended to the base queue name; a random
                          uuid is additionally appended by the name helper.
     :param exclusive: Whether the queue is declared exclusive.
     :return: Queue object from ``reactor.get_trigger_cud_queue``.
     """
     full_name = queue_utils.get_queue_name(
         queue_name_base='st2.trigger.watch',
         queue_name_suffix=queue_suffix,
         add_random_uuid_to_suffix=True)
     return reactor.get_trigger_cud_queue(
         full_name, routing_key='#', exclusive=exclusive)
Ejemplo n.º 5
0
# Queues that must be pre-declared at startup; see the per-entry notes below.
QUEUES = [
    ACTIONSCHEDULER_REQUEST_QUEUE,
    ACTIONRUNNER_WORK_QUEUE,
    ACTIONRUNNER_CANCEL_QUEUE,
    NOTIFIER_ACTIONUPDATE_WORK_QUEUE,
    RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE,
    RULESENGINE_WORK_QUEUE,
    STREAM_ANNOUNCEMENT_WORK_QUEUE,
    STREAM_EXECUTION_ALL_WORK_QUEUE,
    STREAM_LIVEACTION_WORK_QUEUE,
    STREAM_EXECUTION_OUTPUT_QUEUE,
    WORKFLOW_EXECUTION_WORK_QUEUE,
    WORKFLOW_EXECUTION_RESUME_QUEUE,
    # Those queues are dynamically / late created on some class init but we still need to
    # pre-declare them for redis Kombu backend to work.
    reactor.get_trigger_cud_queue(name="st2.preinit", routing_key="init"),
    reactor.get_sensor_cud_queue(name="st2.preinit", routing_key="init"),
]


def _do_register_exchange(exchange, connection, channel, retry_wrapper):
    try:
        kwargs = {
            "exchange": exchange.name,
            "type": exchange.type,
            "durable": exchange.durable,
            "auto_delete": exchange.auto_delete,
            "arguments": exchange.arguments,
            "nowait": False,
            "passive": False,
        }
Ejemplo n.º 6
0
# and don't get lost even if there are no consumers online
QUEUES = [
    ACTIONSCHEDULER_REQUEST_QUEUE,
    ACTIONRUNNER_WORK_QUEUE,
    ACTIONRUNNER_CANCEL_QUEUE,
    EXPORTER_WORK_QUEUE,
    NOTIFIER_ACTIONUPDATE_WORK_QUEUE,
    RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE,
    RULESENGINE_WORK_QUEUE,
    STREAM_ANNOUNCEMENT_WORK_QUEUE,
    STREAM_EXECUTION_WORK_QUEUE,
    STREAM_LIVEACTION_WORK_QUEUE,

    # Those queues are dynamically / late created on some class init but we still need to
    # pre-declare them for redis Kombu backend to work.
    reactor.get_trigger_cud_queue(name='st2.preinit', routing_key='init'),
    reactor.get_sensor_cud_queue(name='st2.preinit', routing_key='init')
]


def _do_register_exchange(exchange, connection, channel, retry_wrapper):
    try:
        kwargs = {
            'exchange': exchange.name,
            'type': exchange.type,
            'durable': exchange.durable,
            'auto_delete': exchange.auto_delete,
            'arguments': exchange.arguments,
            'nowait': False,
            'passive': False
        }
Ejemplo n.º 7
0
 def _get_queue(queue_suffix, exclusive):
     """
     Return the TriggerWatcher trigger CUD queue for *queue_suffix*.

     :param exclusive: Whether the queue is declared exclusive.
     """
     name = queue_utils.get_queue_name(
         queue_name_base='st2.trigger.watch',
         queue_name_suffix=queue_suffix,
         add_random_uuid_to_suffix=True)
     return reactor.get_trigger_cud_queue(
         name, routing_key='#', exclusive=exclusive)
Ejemplo n.º 8
0
    ACTIONRUNNER_CANCEL_QUEUE,
    NOTIFIER_ACTIONUPDATE_WORK_QUEUE,
    RESULTSTRACKER_ACTIONSTATE_WORK_QUEUE,
    RULESENGINE_WORK_QUEUE,

    STREAM_ANNOUNCEMENT_WORK_QUEUE,
    STREAM_EXECUTION_ALL_WORK_QUEUE,
    STREAM_LIVEACTION_WORK_QUEUE,
    STREAM_EXECUTION_OUTPUT_QUEUE,

    WORKFLOW_EXECUTION_WORK_QUEUE,
    WORKFLOW_EXECUTION_RESUME_QUEUE,

    # Those queues are dynamically / late created on some class init but we still need to
    # pre-declare them for redis Kombu backend to work.
    reactor.get_trigger_cud_queue(name='st2.preinit', routing_key='init'),
    reactor.get_sensor_cud_queue(name='st2.preinit', routing_key='init')
]


def _do_register_exchange(exchange, connection, channel, retry_wrapper):
    try:
        kwargs = {
            'exchange': exchange.name,
            'type': exchange.type,
            'durable': exchange.durable,
            'auto_delete': exchange.auto_delete,
            'arguments': exchange.arguments,
            'nowait': False,
            'passive': False
        }