Example #1
        #     'schedule': timedelta(seconds=10000),
        #     'options': {'queue': 'user_info', 'routing_key': 'for_userinfo'}
        # },
        'weiboinfo_task': {
            'task': 'tasks.get_weibo_info.get_weibo_info',
            'schedule': timedelta(seconds=1000),
            'options': {
                'queue': 'weiboinfo',
                'routing_key': 'for_weiboinfo'
            }
        },
        'info_other_task': {
            'task': 'tasks.getuserinfo.get_user',
            'schedule': timedelta(seconds=1000),
            'options': {
                'queue': 'otherinfo',
                'routing_key': 'for_otherinfo'
            }
        },
    },
    CELERY_QUEUES=(
        # Queue('user_info', exchange=Exchange('user_info', type='direct'),
        #       routing_key='for_userinfo'),
        Queue('weiboinfo',
              exchange=Exchange('weiboinfo', type='direct'),
              routing_key='for_weiboinfo'),
        Queue('otherinfo',
              exchange=Exchange('otherinfo', type='direct'),
              routing_key='for_otherinfo'),
    ))
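
A worker has to be pointed at these queues explicitly, since tasks routed to them never reach the default queue; a sketch of the invocation (the app module name is an assumption):

# Assumed worker command consuming the queues declared above:
#   celery -A tasks.workers worker -Q weiboinfo,otherinfo -l info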
Example #2
    def ready(self):
        super(DocumentsApp, self).ready()
        from actstream import registry

        DeletedDocument = self.get_model('DeletedDocument')
        Document = self.get_model('Document')
        DocumentPage = self.get_model('DocumentPage')
        DocumentPageResult = self.get_model('DocumentPageResult')
        DocumentType = self.get_model('DocumentType')
        DocumentTypeFilename = self.get_model('DocumentTypeFilename')
        DocumentVersion = self.get_model('DocumentVersion')
        DuplicatedDocument = self.get_model('DuplicatedDocument')

        DynamicSerializerField.add_serializer(
            klass=Document,
            serializer_class='documents.serializers.DocumentSerializer')

        MissingItem(
            label=_('Create a document type'),
            description=_(
                'Every uploaded document must be assigned a document type, '
                'it is the basic way Mayan EDMS categorizes documents.'),
            condition=lambda: not DocumentType.objects.exists(),
            view='documents:document_type_list')

        ModelAttribute(Document,
                       label=_('Label'),
                       name='label',
                       type_name='field')

        ModelAttribute(
            Document,
            description=_(
                'The MIME type of any of the versions of a document'),
            label=_('MIME type'),
            name='versions__mimetype',
            type_name='field')

        ModelEventType.register(model=DocumentType,
                                event_types=(event_document_create, ))
        ModelEventType.register(model=Document,
                                event_types=(event_document_download,
                                             event_document_properties_edit,
                                             event_document_type_change,
                                             event_document_new_version,
                                             event_document_version_revert,
                                             event_document_view))

        ModelPermission.register(model=Document,
                                 permissions=(
                                     permission_acl_edit,
                                     permission_acl_view,
                                     permission_document_delete,
                                     permission_document_download,
                                     permission_document_edit,
                                     permission_document_new_version,
                                     permission_document_print,
                                     permission_document_properties_edit,
                                     permission_document_restore,
                                     permission_document_trash,
                                     permission_document_version_revert,
                                     permission_document_version_view,
                                     permission_document_view,
                                     permission_events_view,
                                     permission_transformation_create,
                                     permission_transformation_delete,
                                     permission_transformation_edit,
                                     permission_transformation_view,
                                 ))

        ModelPermission.register(model=DocumentType,
                                 permissions=(permission_document_create,
                                              permission_document_type_delete,
                                              permission_document_type_edit,
                                              permission_document_type_view))

        ModelPermission.register_proxy(
            source=Document,
            model=DocumentType,
        )

        ModelPermission.register_inheritance(
            model=Document,
            related='document_type',
        )
        ModelPermission.register_inheritance(
            model=DocumentPage,
            related='document',
        )
        ModelPermission.register_inheritance(
            model=DocumentPageResult,
            related='document_version__document',
        )
        ModelPermission.register_inheritance(
            model=DocumentTypeFilename,
            related='document_type',
        )
        ModelPermission.register_inheritance(
            model=DocumentVersion,
            related='document',
        )

        # Document and document page thumbnail widget
        document_page_thumbnail_widget = DocumentPageThumbnailWidget()

        # Document
        SourceColumn(source=Document,
                     label=_('Thumbnail'),
                     func=lambda context: document_page_thumbnail_widget.
                     render(instance=context['object']))
        SourceColumn(source=Document, attribute='document_type')
        SourceColumn(source=Document,
                     label=_('Pages'),
                     func=lambda context: widget_document_page_number(
                         document=context['object']))

        # DocumentPage
        SourceColumn(source=DocumentPage,
                     label=_('Thumbnail'),
                     func=lambda context: document_page_thumbnail_widget.
                     render(instance=context['object']))

        SourceColumn(source=DocumentPageResult,
                     label=_('Thumbnail'),
                     func=lambda context: document_page_thumbnail_widget.
                     render(instance=context['object']))

        SourceColumn(source=DocumentPageResult,
                     label=_('Type'),
                     attribute='document_version.document.document_type')

        # DocumentType
        SourceColumn(source=DocumentType,
                     label=_('Documents'),
                     func=lambda context: context['object'].get_document_count(
                         user=context['request'].user))

        SourceColumn(
            source=DocumentTypeFilename,
            label=_('Enabled'),
            func=lambda context: two_state_template(context['object'].enabled))

        # DeletedDocument
        SourceColumn(source=DeletedDocument,
                     label=_('Thumbnail'),
                     func=lambda context: document_page_thumbnail_widget.
                     render(instance=context['object']))

        SourceColumn(source=DeletedDocument, attribute='document_type')
        SourceColumn(source=DeletedDocument, attribute='deleted_date_time')

        # DocumentVersion
        SourceColumn(source=DocumentVersion,
                     label=_('Thumbnail'),
                     func=lambda context: document_page_thumbnail_widget.
                     render(instance=context['object']))
        SourceColumn(source=DocumentVersion, attribute='timestamp')
        SourceColumn(source=DocumentVersion,
                     label=_('Pages'),
                     func=lambda context: widget_document_version_page_number(
                         document_version=context['object']))
        SourceColumn(source=DocumentVersion, attribute='mimetype')
        SourceColumn(source=DocumentVersion, attribute='encoding')
        SourceColumn(source=DocumentVersion, attribute='comment')

        # DuplicatedDocument
        SourceColumn(source=DuplicatedDocument,
                     label=_('Thumbnail'),
                     func=lambda context: document_page_thumbnail_widget.
                     render(instance=context['object'].document))
        SourceColumn(source=DuplicatedDocument,
                     label=_('Duplicates'),
                     func=lambda context: context['object'].documents.count())

        app.conf.CELERYBEAT_SCHEDULE.update({
            'task_check_delete_periods': {
                'task': 'documents.tasks.task_check_delete_periods',
                'schedule': timedelta(seconds=CHECK_DELETE_PERIOD_INTERVAL),
            },
            'task_check_trash_periods': {
                'task': 'documents.tasks.task_check_trash_periods',
                'schedule': timedelta(seconds=CHECK_TRASH_PERIOD_INTERVAL),
            },
            'task_delete_stubs': {
                'task': 'documents.tasks.task_delete_stubs',
                'schedule': timedelta(seconds=DELETE_STALE_STUBS_INTERVAL),
            },
        })

        app.conf.CELERY_QUEUES.extend((
            Queue('converter',
                  Exchange('converter'),
                  routing_key='converter',
                  delivery_mode=1),
            Queue('documents_periodic',
                  Exchange('documents_periodic'),
                  routing_key='documents_periodic',
                  delivery_mode=1),
            Queue('uploads', Exchange('uploads'), routing_key='uploads'),
            Queue('documents', Exchange('documents'), routing_key='documents'),
        ))

        app.conf.CELERY_ROUTES.update({
            'documents.tasks.task_check_delete_periods': {
                'queue': 'documents_periodic'
            },
            'documents.tasks.task_check_trash_periods': {
                'queue': 'documents_periodic'
            },
            'documents.tasks.task_clean_empty_duplicate_lists': {
                'queue': 'documents'
            },
            'documents.tasks.task_clear_image_cache': {
                'queue': 'tools'
            },
            'documents.tasks.task_delete_document': {
                'queue': 'documents'
            },
            'documents.tasks.task_delete_stubs': {
                'queue': 'documents_periodic'
            },
            'documents.tasks.task_generate_document_page_image': {
                'queue': 'converter'
            },
            'documents.tasks.task_scan_duplicates_all': {
                'queue': 'tools'
            },
            'documents.tasks.task_scan_duplicates_for': {
                'queue': 'uploads'
            },
            'documents.tasks.task_update_page_count': {
                'queue': 'uploads'
            },
            'documents.tasks.task_upload_new_version': {
                'queue': 'uploads'
            },
        })

        dashboard_main.add_widget(widget=widget_document_types)
        dashboard_main.add_widget(widget=widget_documents_in_trash)
        dashboard_main.add_widget(widget=widget_new_documents_this_month)
        dashboard_main.add_widget(widget=widget_pages_per_month)
        dashboard_main.add_widget(widget=widget_total_documents)

        menu_documents.bind_links(links=(link_document_list_recent,
                                         link_document_list,
                                         link_document_list_deleted,
                                         link_duplicated_document_list))

        menu_main.bind_links(links=(menu_documents, ), position=0)

        menu_setup.bind_links(links=(link_document_type_setup, ))
        menu_tools.bind_links(links=(link_clear_image_cache,
                                     link_duplicated_document_scan))

        # Document type links
        menu_object.bind_links(
            links=(link_document_type_edit, link_document_type_filename_list,
                   link_acl_list,
                   link_object_event_types_user_subcriptions_list,
                   link_document_type_delete),
            sources=(DocumentType, ))
        menu_object.bind_links(links=(link_document_type_filename_edit,
                                      link_document_type_filename_delete),
                               sources=(DocumentTypeFilename, ))
        menu_secondary.bind_links(links=(link_document_type_list,
                                         link_document_type_create),
                                  sources=(DocumentType,
                                           'documents:document_type_create',
                                           'documents:document_type_list'))
        menu_sidebar.bind_links(
            links=(link_document_type_filename_create, ),
            sources=(DocumentTypeFilename,
                     'documents:document_type_filename_list',
                     'documents:document_type_filename_create'))
        menu_sidebar.bind_links(links=(link_trash_can_empty, ),
                                sources=('documents:document_list_deleted',
                                         'documents:trash_can_empty'))

        # Document object links
        menu_object.bind_links(links=(
            link_document_edit,
            link_document_document_type_edit,
            link_document_print,
            link_document_trash,
            link_document_quick_download,
            link_document_download,
            link_document_clear_transformations,
            link_document_clone_transformations,
            link_document_update_page_count,
        ),
                               sources=(Document, ))
        menu_object.bind_links(links=(link_document_restore,
                                      link_document_delete),
                               sources=(DeletedDocument, ))

        # Document facet links
        menu_facet.bind_links(links=(
            link_document_duplicates_list,
            link_acl_list_with_icon,
        ),
                              sources=(Document, ))
        menu_facet.bind_links(links=(link_document_preview, ),
                              sources=(Document, ),
                              position=0)
        menu_facet.bind_links(links=(link_document_properties, ),
                              sources=(Document, ),
                              position=2)
        menu_facet.bind_links(links=(
            link_events_for_object,
            link_object_event_types_user_subcriptions_list_with_icon,
            link_document_version_list,
        ),
                              sources=(Document, ),
                              position=2)
        menu_facet.bind_links(links=(link_document_pages, ),
                              sources=(Document, ))

        # Document actions
        menu_object.bind_links(links=(link_document_version_revert,
                                      link_document_version_download),
                               sources=(DocumentVersion, ))
        menu_multi_item.bind_links(
            links=(link_document_multiple_clear_transformations,
                   link_document_multiple_trash,
                   link_document_multiple_download,
                   link_document_multiple_update_page_count,
                   link_document_multiple_document_type_edit),
            sources=(Document, ))
        menu_multi_item.bind_links(links=(link_document_multiple_restore,
                                          link_document_multiple_delete),
                                   sources=(DeletedDocument, ))

        # Document pages
        menu_facet.bind_links(
            links=(link_document_page_rotate_left,
                   link_document_page_rotate_right, link_document_page_zoom_in,
                   link_document_page_zoom_out, link_document_page_view_reset),
            sources=('documents:document_page_view', ))
        menu_facet.bind_links(links=(link_document_page_return,
                                     link_document_page_view),
                              sources=(DocumentPage, ))
        menu_facet.bind_links(links=(link_document_page_navigation_first,
                                     link_document_page_navigation_previous,
                                     link_document_page_navigation_next,
                                     link_document_page_navigation_last,
                                     link_transformation_list),
                              sources=(DocumentPage, ))
        menu_object.bind_links(links=(link_transformation_list, ),
                               sources=(DocumentPage, ))

        # Document versions
        menu_facet.bind_links(links=(link_document_version_return_document,
                                     link_document_version_return_list),
                              sources=(DocumentVersion, ))
        menu_facet.bind_links(links=(link_document_version_view, ),
                              sources=(DocumentVersion, ))

        post_delete.connect(
            dispatch_uid='handler_remove_empty_duplicates_lists',
            receiver=handler_remove_empty_duplicates_lists,
            sender=Document,
        )
        post_initial_setup.connect(create_default_document_type,
                                   dispatch_uid='create_default_document_type')
        post_version_upload.connect(
            handler_scan_duplicates_for,
            dispatch_uid='handler_scan_duplicates_for',
        )

        registry.register(DeletedDocument)
        registry.register(Document)
        registry.register(DocumentType)
        registry.register(DocumentVersion)
Example #3
    def test_compat_exchange_is_Exchange(self):
        producer = TaskPublisher(exchange=Exchange('foo'), app=self.app)
        self.assertEqual(producer.exchange.name, 'foo')
Example #4
# THIRD PARTY APPS

# Auth0 setup
AUTH0_DOMAIN = env('AUTH0_DOMAIN', default="auth.mozilla.auth0.com")
AUTH0_CLIENTID = env('AUTH0_CLIENTID', default="q8fZZFfGEmSB2c5uSI8hOkKdDGXnlo5z")

# Celery

# TODO: Replace the use of different log parser queues for failures vs not with the
# RabbitMQ priority feature (since the idea behind separate queues was only to ensure
# failures are dealt with first if there is a backlog). After that it should be possible
# to simplify the queue configuration, by using the recommended CELERY_TASK_ROUTES instead:
# http://docs.celeryproject.org/en/latest/userguide/routing.html#automatic-routing
CELERY_TASK_QUEUES = [
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('log_parser', Exchange('default'), routing_key='log_parser.normal'),
    Queue('log_parser_fail', Exchange('default'), routing_key='log_parser.failures'),
    Queue('log_autoclassify', Exchange('default'), routing_key='autoclassify.normal'),
    Queue('log_autoclassify_fail', Exchange('default'), routing_key='autoclassify.failures'),
    Queue('pushlog', Exchange('default'), routing_key='pushlog'),
    Queue('generate_perf_alerts', Exchange('default'), routing_key='generate_perf_alerts'),
    Queue('store_pulse_tasks', Exchange('default'), routing_key='store_pulse_tasks'),
    Queue('store_pulse_pushes', Exchange('default'), routing_key='store_pulse_pushes'),
    Queue('seta_analyze_failures', Exchange('default'), routing_key='seta_analyze_failures'),
]

# Force all queues to be explicitly listed in `CELERY_TASK_QUEUES` to help prevent typos
# and so that `lints/queuelint.py` can check a corresponding worker exists in `Procfile`.
CELERY_TASK_CREATE_MISSING_QUEUES = False
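
The TODO above points at the simpler CELERY_TASK_ROUTES approach, which maps task names to queues instead of pairing each queue with a hand-written routing key; a minimal sketch with placeholder task paths (not the project's real ones):

# Hypothetical sketch only; the task module paths are placeholders.
CELERY_TASK_ROUTES = {
    'myapp.tasks.parse_logs': {'queue': 'log_parser'},
    'myapp.tasks.store_pulse_tasks': {'queue': 'store_pulse_tasks'},
}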
Example #5
    def test(self):
        """
        Loads a test file that includes crafted bgp updates as
        input and expected messages as output.
        """
        RABBITMQ_USER = os.getenv("RABBITMQ_USER", "guest")
        RABBITMQ_PASS = os.getenv("RABBITMQ_PASS", "guest")
        RABBITMQ_HOST = os.getenv("RABBITMQ_HOST", "rabbitmq")
        RABBITMQ_PORT = os.getenv("RABBITMQ_PORT", 5672)
        RABBITMQ_URI = "amqp://{}:{}@{}:{}//".format(
            RABBITMQ_USER, RABBITMQ_PASS, RABBITMQ_HOST, RABBITMQ_PORT
        )

        # exchanges
        self.update_exchange = Exchange(
            "bgp-update", type="direct", durable=False, delivery_mode=1
        )

        self.hijack_exchange = Exchange(
            "hijack-update", type="direct", durable=False, delivery_mode=1
        )

        self.pg_amq_bridge = Exchange(
            "amq.direct", type="direct", durable=True, delivery_mode=1
        )

        # queues
        self.update_queue = Queue(
            "detection-testing",
            exchange=self.pg_amq_bridge,
            routing_key="update-update",
            durable=False,
            auto_delete=True,
            max_priority=1,
            consumer_arguments={"x-priority": 1},
        )

        self.hijack_queue = Queue(
            "hijack-testing",
            exchange=self.hijack_exchange,
            routing_key="update",
            durable=False,
            auto_delete=True,
            max_priority=1,
            consumer_arguments={"x-priority": 1},
        )

        self.hijack_db_queue = Queue(
            "hijack-db-testing",
            exchange=self.pg_amq_bridge,
            routing_key="hijack-update",
            durable=False,
            auto_delete=True,
            max_priority=1,
            consumer_arguments={"x-priority": 1},
        )

        with Connection(RABBITMQ_URI) as connection:
            print("Waiting for pg_amq exchange..")
            Tester.waitExchange(self.pg_amq_bridge, connection.default_channel)
            print("Waiting for hijack exchange..")
            Tester.waitExchange(self.hijack_exchange, connection.default_channel)
            print("Waiting for update exchange..")
            Tester.waitExchange(self.update_exchange, connection.default_channel)

            # query database for the states of the processes
            db_con = self.getDbConnection()
            db_cur = db_con.cursor()
            query = "SELECT name FROM process_states WHERE running=True"
            running_modules = set()
            # wait until all 6 modules are running
            while len(running_modules) < 6:
                db_cur.execute(query)
                entries = db_cur.fetchall()
                for entry in entries:
                    running_modules.add(entry[0])
                db_con.commit()
                print("Running modules: {}".format(running_modules))
                print("{}/6 modules are running.".format(len(running_modules)))
                time.sleep(1)

            Tester.config_request_rpc(connection)

            time.sleep(10)

            # call all helper functions
            Helper.hijack_resolve(
                db_con, connection, "a", "139.5.46.0/24", "S|0|-|-", 133720
            )
            Helper.hijack_mitigate(db_con, connection, "b", "10.91.236.0/24")
            Helper.hijack_ignore(
                db_con, connection, "c", "139.5.237.0/24", "S|0|-|-", 136334
            )
            Helper.hijack_comment(db_con, connection, "d", "test")
            Helper.hijack_ack(db_con, connection, "e", "true")
            Helper.hijack_multiple_action(
                db_con, connection, ["f", "g"], "hijack_action_acknowledge"
            )
            Helper.hijack_multiple_action(
                db_con, connection, ["f", "g"], "hijack_action_acknowledge_not"
            )
            Helper.hijack_multiple_action(
                db_con, connection, ["f"], "hijack_action_resolve"
            )
            Helper.hijack_multiple_action(
                db_con, connection, ["g"], "hijack_action_ignore"
            )
            # multi-action delete a hijack purged from cache
            Helper.hijack_multiple_action(
                db_con, connection, ["f"], "hijack_action_delete"
            )
            # delete a hijack purged from cache
            Helper.hijack_delete(
                db_con, connection, "g", "139.5.16.0/22", "S|0|-|-", 133676
            )
            # multi-action delete a hijack using cache
            Helper.hijack_multiple_action(
                db_con, connection, ["h"], "hijack_action_delete"
            )
            # delete a hijack using cache
            Helper.hijack_delete(
                db_con, connection, "i", "139.5.24.0/24", "S|0|-|-", 133720
            )
            Helper.hijack_mitigate(db_con, connection, "j", "2001:db8:abcd:12::0/80")
            Helper.load_as_sets(connection)

            time.sleep(10)

            db_cur.close()
            db_con.close()

            for testfile in os.listdir("testfiles/"):
                self.clear()

                self.curr_test = testfile
                self.messages = {}
                # load test
                with open("testfiles/{}".format(testfile), "r") as f:
                    self.messages = json.load(f)

                send_len = len(self.messages)

                with nested(
                    connection.Consumer(
                        self.hijack_queue,
                        callbacks=[self.validate_message],
                        accept=["ujson"],
                    ),
                    connection.Consumer(
                        self.update_queue,
                        callbacks=[self.validate_message],
                        accept=["ujson", "txtjson"],
                    ),
                    connection.Consumer(
                        self.hijack_db_queue,
                        callbacks=[self.validate_message],
                        accept=["ujson", "txtjson"],
                    ),
                ):
                    send_cnt = 0
                    # send and validate all messages in the messages.json file
                    while send_cnt < send_len:
                        self.curr_idx = send_cnt
                        self.send_next_message(connection)
                        send_cnt += 1
                        # sleep until we receive all expected messages
                        while self.curr_idx != send_cnt:
                            time.sleep(0.1)
                            try:
                                connection.drain_events(timeout=10)
                            except socket.timeout:
                                # avoid infinite loop by timeout
                                assert False, "Consumer timeout"

            connection.close()

        with open("configs/config.yaml") as f1, open("configs/config2.yaml") as f2:
            new_data = f2.read()
            old_data = f1.read()

        Helper.change_conf(connection, new_data, old_data, "test")

        time.sleep(5)
        self.supervisor.supervisor.stopAllProcesses()

        self.waitProcess("listener", 0)  # 0 STOPPED
        self.waitProcess("clock", 0)  # 0 STOPPED
        self.waitProcess("detection", 0)  # 0 STOPPED
        self.waitProcess("mitigation", 0)  # 0 STOPPED
        self.waitProcess("configuration", 0)  # 0 STOPPED
        self.waitProcess("database", 0)  # 0 STOPPED
        self.waitProcess("observer", 0)  # 0 STOPPED
Example #6
DEBUG = False
TESTING = False

# Cache
CACHE_REDIS_HOST = 'hommod_redis_1'
CACHE_REDIS_PORT = 6379
CACHE_REDIS_DB = 1
CACHE_EXPIRATION_TIME = 60 * 60 * 24 * 30  # 30 days
CACHE_LOCK_TIMEOUT = 60 * 60  # 1 hour

# Celery
CELERY_ACCEPT_CONTENT = ['pickle', 'json', 'msgpack', 'yaml']
CELERY_BROKER_URL = 'amqp://guest@hommod_rabbitmq_1'
CELERY_DEFAULT_QUEUE = 'hommod'
CELERYD_CONCURRENCY = 20
CELERY_QUEUES = (Queue('hommod', Exchange('hommod'), routing_key='hommod'), )
CELERY_TRACK_STARTED = True
CELERY_RESULT_BACKEND = 'redis://hommod_redis_1/1'

# Time it takes for a model to get outdated:
MAX_MODEL_DAYS = 100

# Services
INTERPRO_URL = 'http://www.ebi.ac.uk/Tools/services/rest/iprscan5'
UNIPROT_URL = 'http://www.uniprot.org/uniprot'

# Directories and File Paths
YASARA_DIR = '/deps/yasara/yasara'
MODEL_DIR = '/data/models/'
BLACKLIST_FILE_PATH = '/data/blacklisted_templates'
DSSP_DIR = '/mnt/chelonium/dssp/'
Example #7
CELERY_RESULT_BACKEND = 'amqp'
CELERY_RESULT_PERSISTENT = True
RABBITMQ_USER = os.environ.get('RABBITMQ_USER')
RABBITMQ_PASSWORD = os.environ.get('RABBITMQ_PASSWORD')
RABBITMQ_HOSTNAME = os.environ.get('RABBITMQ_HOSTNAME')
RABBITMQ_PORT = os.environ.get('RABBITMQ_PORT')
BROKER_URL = 'amqp://{}:{}@{}:{}'.format(RABBITMQ_USER, RABBITMQ_PASSWORD,
                                         RABBITMQ_HOSTNAME, RABBITMQ_PORT)
BROKER_CONNECTION_TIMEOUT = 15
BROKER_CONNECTION_MAX_RETRIES = 5
CELERY_DISABLE_RATE_LIMITS = True
CELERY_TASK_RESULT_EXPIRES = 7200
CELERY_IMPORTS = ('main.appD',)  # trailing comma: a tuple, not a bare string
CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('gold', Exchange('news_ml'), routing_key='news_ml.gold'),
    Queue('silver', Exchange('news_ml'), routing_key='news_ml.silver'),
    Queue('bronze', Exchange('news_ml'), routing_key='news_ml.bronze'),
)
CELERY_DEFAULT_EXCHANGE = 'news_ml'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_DEFAULT_ROUTING_KEY = 'default'
CELERY_TRACK_STARTED = True

CELERY_ROUTES = {
    'process_campaign': {
        'queue': 'gold',
        'routing_key': 'news_ml.gold',
        'exchange': 'news_ml',
    },
Example #8
from __future__ import absolute_import

from kombu import Exchange, Queue

from st2common.transport import publishers

__all__ = [
    'LiveActionPublisher',

    'get_queue',
    'get_status_management_queue'
]


LIVEACTION_XCHG = Exchange('st2.liveaction', type='topic')
LIVEACTION_STATUS_MGMT_XCHG = Exchange('st2.liveaction.status', type='topic')


class LiveActionPublisher(publishers.CUDPublisher, publishers.StatePublisherMixin):

    def __init__(self):
        publishers.CUDPublisher.__init__(self, exchange=LIVEACTION_XCHG)
        publishers.StatePublisherMixin.__init__(self, exchange=LIVEACTION_STATUS_MGMT_XCHG)


def get_queue(name, routing_key):
    return Queue(name, LIVEACTION_XCHG, routing_key=routing_key)


def get_status_management_queue(name, routing_key):
    return Queue(name, LIVEACTION_STATUS_MGMT_XCHG, routing_key=routing_key)
Example #9
CELERY_ALWAYS_EAGER = (env('CELERY_ALWAYS_EAGER', False) == 'True')
CELERY_TASK_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['pickle', 'json']  # it's using pickle when in eager mode
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
CELERYD_TASK_SOFT_TIME_LIMIT = 300
CELERYD_LOG_FORMAT = '%(message)s level=%(levelname)s process=%(processName)s'
CELERYD_TASK_LOG_FORMAT = ' '.join([CELERYD_LOG_FORMAT, 'task=%(task_name)s task_id=%(task_id)s'])

CELERYBEAT_SCHEDULE_FILENAME = env('CELERYBEAT_SCHEDULE_FILENAME', './celerybeatschedule.db')
CELERY_DEFAULT_QUEUE = 'default'
CELERY_DEFAULT_EXCHANGE = 'default'
CELERY_DEFAULT_ROUTING_KEY = 'default'

CELERY_QUEUES = (
    Queue('default', Exchange(CELERY_DEFAULT_EXCHANGE), routing_key=CELERY_DEFAULT_ROUTING_KEY),
    Queue('expiry', Exchange('expiry', type='topic'), routing_key='expiry.#'),
    Queue('legal', Exchange('legal', type='topic'), routing_key='legal.#'),
    Queue('publish', Exchange('publish', type='topic'), routing_key='publish.#'),
)

CELERY_ROUTES = {
    'apps.archive.content_expiry': {
        'queue': 'expiry',
        'routing_key': 'expiry.content'
    },
    'superdesk.io.gc_ingest': {
        'queue': 'expiry',
        'routing_key': 'expiry.ingest'
    },
    'apps.auth.session_purge': {
Example #10
from __future__ import absolute_import, unicode_literals
from datetime import timedelta
import os

from celery import Celery
from kombu import Exchange, Queue

# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shopelectro.settings.local')

app = Celery('shopelectro')

# Exchanges
default_exchange = Exchange('default', type='direct')
utils_exchange = Exchange('utils', type='direct')

# http://docs.celeryproject.org/en/latest/userguide/tasks.html
task_queues = (Queue(
    name='default',
    exchange=default_exchange,
    routing_key='default',
), Queue(
    name='mail',
    exchange=utils_exchange,
    routing_key='utils.mail',
), Queue(
    name='command',
    exchange=utils_exchange,
    routing_key='utils.command',
))
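
The snippet ends before the queues are attached to the app; an assumed continuation in the same lowercase-settings style:

# Assumed continuation: register the queues and routing defaults on the app.
app.conf.update(
    task_queues=task_queues,
    task_default_queue='default',
    task_default_exchange='default',
    task_default_routing_key='default',
)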
Example #11
CELERY_ENABLE_UTC = True
CELERY_TIMEZONE = "UTC"
REDIS_HOST = os.environ.get('REDIS_HOST', 'redis')

BROKER_URL = 'redis://{hostname}/0'.format(hostname=REDIS_HOST)

BROKER_POOL_LIMIT = 1
BROKER_CONNECTION_TIMEOUT = 10

# Celery configuration

# configure queues, currently we have only one
CELERY_DEFAULT_QUEUE = 'default'
CELERY_DEFAULT_ROUTING_KEY = 'default'
CELERY_QUEUES = (Queue('default', Exchange('default'),
                       routing_key='default'), )

CELERY_IMPORTS = ("processengine.tasks")
# Sensible settings for celery
CELERY_ALWAYS_EAGER = True
CELERY_ACKS_LATE = True
CELERY_TASK_PUBLISH_RETRY = True
CELERY_DISABLE_RATE_LIMITS = False

# By default we will ignore result
# If you want to see results and try out tasks interactively,
# change it to False
# Or change this setting on tasks level
CELERY_IGNORE_RESULT = False
CELERY_SEND_TASK_ERROR_EMAILS = False
Example #12
from logging import StreamHandler

from django.conf import settings
from celery import current_task
from celery.signals import task_prerun, task_postrun
from kombu import Connection, Exchange, Queue, Producer
from kombu.mixins import ConsumerMixin

from .utils import get_celery_task_log_path

routing_key = 'celery_log'
celery_log_exchange = Exchange('celery_log_exchange', type='direct')
celery_log_queue = [
    Queue('celery_log', celery_log_exchange, routing_key=routing_key)
]


class CeleryLoggerConsumer(ConsumerMixin):
    def __init__(self):
        self.connection = Connection(settings.CELERY_LOG_BROKER_URL)

    def get_consumers(self, Consumer, channel):
        return [
            Consumer(queues=celery_log_queue,
                     accept=['pickle', 'json'],
                     callbacks=[self.process_task])
        ]

    def handle_task_start(self, task_id, message):
        pass
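
    # Not shown in this excerpt: the process_task callback wired up in
    # get_consumers. A minimal assumed sketch; the payload fields are
    # hypothetical, not taken from the original project.
    def process_task(self, body, message):
        task_id = body.get('task_id') if isinstance(body, dict) else None
        if task_id:
            # append the received record to this task's log file
            with open(get_celery_task_log_path(task_id), 'a') as f:
                f.write(str(body.get('msg', '')))
        message.ack()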
Example #13
def my_on_failure(self, exc, task_id, args, kwargs, einfo):
    print('Oh no! Task failed: {0!r}'.format(exc))

CELERY_ANNOTATIONS = {'*': {'on_failure': my_on_failure}}

class MyAnnotate(object):

    def annotate(self, task):
        if task.name.startswith('tasks.'):
            return {'rate_limit': '10/s'}

CELERY_ANNOTATIONS = (MyAnnotate(), {…})


CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('for_task_A', Exchange('for_task_A'), routing_key='for_task_A'),
    Queue('for_task_B', Exchange('for_task_B'), routing_key='for_task_B'),

    # Messages whose routing key starts with "task." go to the default queue
    Queue('default', routing_key='task.#'),

    # Messages whose routing key starts with "web." go to the web_tasks queue
    Queue('web_tasks', routing_key='web.#'),
)
CELERY_DEFAULT_EXCHANGE = 'tasks'  # the default exchange is named "tasks"
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'  # the default exchange type is topic
CELERY_DEFAULT_ROUTING_KEY = 'task.default'  # default routing key; it matches the default queue above

CELERY_ROUTES = {
    'my_taskA': {'queue': 'for_task_A', 'routing_key': 'for_task_A'},
Example #14
        password=os.environ.get('RABBIT_ENV_RABBITMQ_PASS', 'mypass'),
        hostname=RABBIT_HOSTNAME,
        vhost=os.environ.get('RABBIT_ENV_VHOST', ''))

# We don't want to have dead connections stored on rabbitmq, so we have to negotiate using heartbeats
BROKER_HEARTBEAT = '?heartbeat=30'
if not BROKER_URL.endswith(BROKER_HEARTBEAT):
    BROKER_URL += BROKER_HEARTBEAT

#BROKER_POOL_LIMIT = 1
#BROKER_CONNECTION_TIMEOUT = 10

# configure queues, currently we have only one
CELERY_DEFAULT_QUEUE = 'default'
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
)

# Sensible settings for celery
CELERY_ALWAYS_EAGER = False
CELERY_ACKS_LATE = True
CELERY_TASK_PUBLISH_RETRY = True
CELERY_DISABLE_RATE_LIMITS = False

CELERY_IGNORE_RESULT = False
CELERY_SEND_TASK_ERROR_EMAILS = False
CELERY_TASK_RESULT_EXPIRES = 600

# Set redis as celery result backend

CELERY_RESULT_BACKEND = 'redis://%s:%d/%d' % (REDIS_HOST, REDIS_PORT, REDIS_DB)
Example #15
    def __init__(self, connection_string, exchange):
        self._connection = BrokerConnection(connection_string)
        self._connections = set([self._connection])  # set of connections for the heartbeat
        self._exchange = Exchange(exchange, durable=True, delivery_mode=2, type='topic')
        monitor_heartbeats(self._connections)
Example #16
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""Indexer for Invenio."""

from __future__ import absolute_import, print_function

from kombu import Exchange, Queue

INDEXER_DEFAULT_INDEX = "records-record-v1.0.0"
"""Default index to use if no schema is defined."""

INDEXER_DEFAULT_DOC_TYPE = "record-v1.0.0"
"""Default doc_type to use if no schema is defined."""

INDEXER_MQ_EXCHANGE = Exchange('indexer', type='direct')
"""Default exchange for message queue."""

INDEXER_MQ_QUEUE = Queue('indexer',
                         exchange=INDEXER_MQ_EXCHANGE,
                         routing_key='indexer')
"""Default queue for message queue."""

INDEXER_MQ_ROUTING_KEY = 'indexer'
"""Default routing key for message queue."""

INDEXER_REPLACE_REFS = True
"""Whether to replace JSONRefs prior to indexing record."""

INDEXER_BULK_REQUEST_TIMEOUT = 10
"""Request timeout to use in Bulk indexing."""
Example #17
    },
    {
        "last": datetime.datetime(2000, 1, 1),
        "timedelta": datetime.timedelta(minutes=10),
        "message": regnum_rss_task_message,
    },
]


def main(out):

    while True:
        for task in TASKS:
            if task['last'] + task['timedelta'] < datetime.datetime.now():
                with Connection() as connection:
                    with connection.channel() as channel:
                        producer = Producer(channel)
                        producer.publish(task['message'], exchange=out)

                task['last'] = datetime.datetime.now()
            continue
        sleep(10)


if __name__ == '__main__':
    # args = parser.parse_args()
    out = Exchange('monitor-out-ex')  # args.output_queue
    main(out)
else:
    raise Exception(f"File {__name__} can't be imported")
Example #18
# Logging
task_store_errors_even_if_ignored = True

# Serialization
task_serializer = "json"
result_serializer = "json"
accept_content = ["json"]

# Performance
worker_disable_rate_limits = True

# Queue config
task_default_queue = "default"
task_queues = (
    # Default queue
    Queue("default", Exchange("default"), routing_key="default"),

    # Misc worker queue
    Queue("worker", Exchange("worker"), routing_key="worker", delivery_mode=1),

    # Matchmaker queue
    Queue("matchmaker",
          Exchange("matchmaker"),
          routing_key="matchmaker",
          delivery_mode=1),

    # Spamless queue
    Queue("spamless",
          Exchange("spamless"),
          routing_key="spamless",
          delivery_mode=1),
Example #19
from kombu import Connection, Exchange, Queue
from kombu.mixins import ConsumerMixin

rabbit_url = "amqp://localhost:5672/"


class Worker(ConsumerMixin):
    def __init__(self, connection, queues):
        self.connection = connection
        self.queues = queues

    def get_consumers(self, Consumer, channel):
        return [Consumer(queues=self.queues, callbacks=[self.on_message])]

    def on_message(self, body, message):
        print('Got message: {0}'.format(body))
        message.ack()


exchange = Exchange("example-exchange", type="direct")
queues = [Queue("example-queue", exchange, routing_key="BOB")]

with Connection(rabbit_url, heartbeat=4) as conn:
    try:
        worker = Worker(conn, queues)
        worker.run()
    except:
        raise
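
A matching kombu producer can exercise this consumer by publishing to the same exchange with routing key "BOB"; a minimal sketch reusing the names above (the payload is illustrative):

from kombu import Connection, Producer

with Connection(rabbit_url) as conn:
    producer = Producer(conn.default_channel, exchange=exchange, routing_key="BOB")
    # declare=queues ensures the bound queue exists before publishing
    producer.publish({"hello": "world"}, declare=queues)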
Example #20
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(PROJECT_PATH, 'media')

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(PROJECT_PATH, 'static')

CELERY_RESULT_BACKEND = 'redis://'
CELERY_TASK_RESULT_EXPIRES = 900  # 15 min

CELERY_DEFAULT_QUEUE = 'default'
CELERY_DEFAULT_ROUTING_KEY = 'default'
CELERY_DEFAULT_EXCHANGE = 'default'
CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
CELERY_SEND_TASK_ERROR_EMAILS = True

default_exchange = Exchange(CELERY_DEFAULT_EXCHANGE,
                            CELERY_DEFAULT_EXCHANGE_TYPE)
CELERY_QUEUES = (
    Queue(CELERY_DEFAULT_QUEUE,
          exchange=default_exchange,
          routing_key=CELERY_DEFAULT_QUEUE),
    Queue('receiver', exchange=default_exchange, routing_key='receiver'),
    Queue('logger', exchange=default_exchange, routing_key='logger'),
    Queue('control', exchange=default_exchange, routing_key='control'),
    Queue('rabbit', exchange=default_exchange, routing_key='rabbit'),
)

BROKER_URL = None

# Broker log retention period
KEEP_BROKER_LOGS = 7  # number of days
# Archived log retention period
Example #21
from kombu import Queue
from kombu import Exchange
from datetime import timedelta

CELERY_QUEUES = (
    Queue('liked_queue', Exchange('liked'), routing_key='liked'),
    Queue('user_queue', Exchange('user'), routing_key='user'),
)

CELERY_ROUTES = {
    'tasks_liked.liked_add': {
        'queue': 'liked_queue',
        'routing_key': 'liked',
    },
    'tasks_user.user_add': {
        'queue': 'user_queue',
        'routing_key': 'user',
    },
    'tasks_liked.get_liked_tweet': {
        'queue': 'liked_queue',
        'routing_key': 'liked',
    },
    'tasks_user.get_user_tweet': {
        'queue': 'user_queue',
        'routing_key': 'user',
    },
}
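
With these routes in place, sending a task by name is enough for it to land on its mapped queue; a sketch assuming a configured Celery app object named app:

# Routed to liked_queue by the CELERY_ROUTES mapping above (app is assumed).
app.send_task('tasks_liked.liked_add', args=(123,))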
Example #22
# Default queue
CELERY_DEFAULT_QUEUE = "default"

# Result expiration time
CELERY_TASK_RESULT_EXPIRES = 3600

# Topic exchange example.
# Suppose there are queues bound with these routing keys:
# [usa.news, usa.weather, norway.news, norway.weather]
# "*" matches exactly one word:
#   "*.news" matches [usa.news, norway.news]
# "#" matches zero or more words (ordering behavior may need a quick test):
#   "usa.#" matches [usa.news, usa.weather]

CELERY_QUEUES = (
    Queue('send_email', Exchange('send', type='topic'), routing_key='send.*'),
    Queue('save_db', Exchange('save', type='topic'), routing_key='save.#'),
    Queue('save_redis', Exchange('save', type='topic'), routing_key='save.#'),
    Queue('default'),  # if a default queue is configured, it must be listed here and consumed by the workers at startup
    Queue('time_task'),
)

# Periodic task configuration: runs every two seconds.
# After startup a celerybeat-schedule file is created to store the last run time;
# its location can also be set with the -s option.
CELERYBEAT_SCHEDULE = {
    'send-email-every-minite': {    # name of the periodic task
        'task': 'spider.tasks.tick',     # the task it maps to
        'schedule': timedelta(seconds=2),  # the interval; here, every 2 seconds
        'args': (datetime.now().strftime("%x %X"),),
    }
Example #23
    def test(self):
        """
        Loads a test file that includes crafted bgp updates as
        input and expected messages as output.
        """
        RABBITMQ_USER = os.getenv("RABBITMQ_USER", "guest")
        RABBITMQ_PASS = os.getenv("RABBITMQ_PASS", "guest")
        RABBITMQ_HOST = os.getenv("RABBITMQ_HOST", "rabbitmq")
        RABBITMQ_PORT = os.getenv("RABBITMQ_PORT", 5672)
        RABBITMQ_URI = "amqp://{}:{}@{}:{}//".format(
            RABBITMQ_USER, RABBITMQ_PASS, RABBITMQ_HOST, RABBITMQ_PORT
        )
        RPKI_VALIDATOR_HOST = os.getenv("RPKI_VALIDATOR_HOST", "routinator")
        RPKI_VALIDATOR_PORT = os.getenv("RPKI_VALIDATOR_PORT", 3323)

        # check RPKI RTR manager connectivity
        while True:
            try:
                rtrmanager = RTRManager(RPKI_VALIDATOR_HOST, RPKI_VALIDATOR_PORT)
                rtrmanager.start()
                print(
                    "Connected to RPKI VALIDATOR '{}:{}'".format(
                        RPKI_VALIDATOR_HOST, RPKI_VALIDATOR_PORT
                    )
                )
                rtrmanager.stop()
                break
            except Exception:
                print(
                    "Could not connect to RPKI VALIDATOR '{}:{}'".format(
                        RPKI_VALIDATOR_HOST, RPKI_VALIDATOR_PORT
                    )
                )
                print("Retrying in 30 seconds...")
                time.sleep(30)

        # exchanges
        self.update_exchange = Exchange(
            "bgp-update", type="direct", durable=False, delivery_mode=1
        )

        self.hijack_exchange = Exchange(
            "hijack-update", type="direct", durable=False, delivery_mode=1
        )

        self.pg_amq_bridge = Exchange(
            "amq.direct", type="direct", durable=True, delivery_mode=1
        )

        # queues
        self.update_queue = Queue(
            "detection-testing",
            exchange=self.pg_amq_bridge,
            routing_key="update-update",
            durable=False,
            auto_delete=True,
            max_priority=1,
            consumer_arguments={"x-priority": 1},
        )

        self.hijack_queue = Queue(
            "hijack-testing",
            exchange=self.hijack_exchange,
            routing_key="update",
            durable=False,
            auto_delete=True,
            max_priority=1,
            consumer_arguments={"x-priority": 1},
        )

        self.hijack_db_queue = Queue(
            "hijack-db-testing",
            exchange=self.pg_amq_bridge,
            routing_key="hijack-update",
            durable=False,
            auto_delete=True,
            max_priority=1,
            consumer_arguments={"x-priority": 1},
        )

        with Connection(RABBITMQ_URI) as connection:
            print("Waiting for pg_amq exchange..")
            Tester.waitExchange(self.pg_amq_bridge, connection.default_channel)
            print("Waiting for hijack exchange..")
            Tester.waitExchange(self.hijack_exchange, connection.default_channel)
            print("Waiting for update exchange..")
            Tester.waitExchange(self.update_exchange, connection.default_channel)

            self.supervisor.supervisor.startAllProcesses()

            # print(
            #     "Sleeping for 60 seconds to allow the RTR server to populate its db..."
            # )
            # time.sleep(60)

            # query database for the states of the processes
            db_con = self.getDbConnection()
            db_cur = db_con.cursor()
            query = "SELECT name FROM process_states WHERE running=True"
            running_modules = set()
            # wait until all 6 modules are running
            while len(running_modules) < 6:
                db_cur.execute(query)
                entries = db_cur.fetchall()
                for entry in entries:
                    running_modules.add(entry[0])
                db_con.commit()
                print("Running modules: {}".format(running_modules))
                print("{}/6 modules are running.".format(len(running_modules)))
                time.sleep(1)

            Tester.config_request_rpc(connection)

            time.sleep(10)

            for testfile in os.listdir("testfiles/"):
                self.clear()

                self.curr_test = testfile
                self.messages = {}
                # load test
                with open("testfiles/{}".format(testfile), "r") as f:
                    self.messages = json.load(f)

                send_len = len(self.messages)

                with nested(
                    connection.Consumer(
                        self.hijack_queue,
                        callbacks=[self.validate_message],
                        accept=["ujson"],
                    ),
                    connection.Consumer(
                        self.update_queue,
                        callbacks=[self.validate_message],
                        accept=["ujson", "txtjson"],
                    ),
                    connection.Consumer(
                        self.hijack_db_queue,
                        callbacks=[self.validate_message],
                        accept=["ujson", "txtjson"],
                    ),
                ):
                    send_cnt = 0
                    # send and validate all messages in the messages.json file
                    while send_cnt < send_len:
                        self.curr_idx = send_cnt
                        self.send_next_message(connection)
                        send_cnt += 1
                        # sleep until we receive all expected messages
                        while self.curr_idx != send_cnt:
                            time.sleep(0.1)
                            try:
                                connection.drain_events(timeout=10)
                            except socket.timeout:
                                # avoid infinite loop by timeout
                                assert False, "Consumer timeout"

            connection.close()

        time.sleep(5)
        self.supervisor.supervisor.stopAllProcesses()

        self.waitProcess("listener", 0)  # 0 STOPPED
        self.waitProcess("clock", 0)  # 0 STOPPED
        self.waitProcess("detection", 0)  # 0 STOPPED
        self.waitProcess("mitigation", 0)  # 0 STOPPED
        self.waitProcess("configuration", 0)  # 0 STOPPED
        self.waitProcess("database", 0)  # 0 STOPPED
        self.waitProcess("observer", 0)  # 0 STOPPED
Example #24
    def publish_message(self, routing_key, message):
        exchange = Exchange('editing_exchange', type='direct')
        file_ready_publisher = message_publisher.Message_publisher(
            self.connection, exchange, routing_key)
        file_ready_publisher.publishMessage(message)
Example #25
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERY_TASK_RESULT_EXPIRES = 60 * 60 * 24
CELERYD_MAX_TASKS_PER_CHILD = 40
CELERY_TRACK_STARTED = True
CELERY_ENABLE_UTC = False
CELERY_TIMEZONE = 'Asia/Shanghai'
platforms.C_FORCE_ROOT = True

#celery route config
CELERY_IMPORTS = ("OpsManage.tasks.assets", "OpsManage.tasks.ansible",
                  "OpsManage.tasks.cron", "OpsManage.tasks.deploy",
                  "OpsManage.tasks.sql", "OpsManage.tasks.sched")
CELERY_QUEUES = (
    Queue('default', Exchange('default'), routing_key='default'),
    Queue('ansible', Exchange('ansible'), routing_key='ansible_#'),
)
CELERY_ROUTES = {
    'OpsManage.tasks.sql.*': {
        'queue': 'default',
        'routing_key': 'default'
    },
    'OpsManage.tasks.assets.*': {
        'queue': 'default',
        'routing_key': 'default'
    },
    'OpsManage.tasks.cron.*': {
        'queue': 'default',
        'routing_key': 'default'
    },
Example #26
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings')

app = Celery('{{cookiecutter.project_name}}')

# Using a string here means the worker doesn't have to serialize
# the configuration object to child processes.
# - namespace='CELERY' means all celery-related configuration keys
#   should have a `CELERY_` prefix.
app.config_from_object('django.conf:settings', namespace='CELERY')

# Load task modules from all registered Django app configs.
app.autodiscover_tasks()

default_exchange = Exchange('default', type='direct')
urgent_exchange = Exchange('urgent', type='direct')

app.conf.task_queues = (
    Queue('default', default_exchange, routing_key='default'),
    Queue('urgent', urgent_exchange, routing_key='urgent'),
)
app.conf.task_default_queue = 'default'
app.conf.task_default_exchange = 'default'
app.conf.task_default_routing_key = 'default'

app.conf.beat_schedule = {
    'schedule_debug_task': {
        'task': 'config.celery.debug_task',
        'schedule': crontab(minute='*/15'),  # Execute every 15 minutes.
    },
Example #27
    def test_add_default_exchange(self):
        ex = Exchange('fff', 'fanout')
        q = Queues(default_exchange=ex)
        q.add(Queue('foo'))
        self.assertEqual(q['foo'].exchange, ex)
Example #28
    def __init__(self, types):

        self.BROKER_URL = 'librabbitmq://' + cfg.RABBITMQ_IP + '/' + cfg.CB_CLUSTER_TAG
        self.CELERY_ACKS_LATE = True
        self.CELERYD_PREFETCH_MULTIPLIER = 1
        self.CELERY_TASK_SERIALIZER = 'pickle'
        self.CELERY_DISABLE_RATE_LIMITS = True
        self.CELERY_TASK_RESULT_EXPIRES = 5
        self.CELERY_DEFAULT_EXCHANGE = 'default'
        self.CELERY_DEFAULT_EXCHANGE_TYPE = 'direct'
        self.CELERY_DEFAULT_ROUTING_KEY = 'default'
        self.CB_CLUSTER_TAG = cfg.CB_CLUSTER_TAG
        self.CELERY_DEFAULT_QUEUE = cfg.CB_CLUSTER_TAG
        self.CELERYBEAT_SCHEDULE = {
            'systest_manager': {
                'task': 'app.systest_manager.systestManager',
                'schedule': timedelta(seconds=3),
                'args': ('systest_manager_' + cfg.CB_CLUSTER_TAG, )
            },
        }

        default_ex = Exchange(self.CELERY_DEFAULT_EXCHANGE,
                              routing_key=self.CB_CLUSTER_TAG,
                              auto_delete=True,
                              durable=True)

        self.CELERY_QUEUES = (

            # queue for default routing
            Queue(self.CB_CLUSTER_TAG, default_ex, auto_delete=True),

            # queue for system test-case execution
            self.make_queue('systest_mgr_consumer', 'test.mgr', default_ex),

            # queue for cluster status tasks
            self.make_queue('cluster_status', 'cluster.status', default_ex),
            self.make_queue('phase_status', 'phase.status', default_ex),
            self.make_queue('run_phase', 'run.phase', default_ex),
        )

        self.CELERY_ROUTES = (
            {
                'app.systest_manager.systestManager':
                self.route_args('systest_mgr_consumer', 'test.mgr')
            },
            {
                'app.systest_manager.get_phase_status':
                self.route_args('phase_status', 'phase.status')
            },
            {
                'app.systest_manager.runPhase':
                self.route_args('run_phase', 'run.phase')
            },
            {
                'app.workload_manager.updateClusterStatus':
                self.route_args('cluster_status', 'cluster.status')
            },
        )

        for type_ in types:
            if type_ == "kv" or type_ == "all":
                self.add_kvconfig()
                self.add_kv_ops_manager()
                self.add_report_kv_latency()
            if type_ == "query" or type_ == "all":
                self.add_queryconfig()
            if type_ == "admin" or type_ == "all":
                self.add_adminconfig()

            self.CELERYBEAT_SCHEDULE.update({
                'update_cluster_status': {
                    'task': 'app.workload_manager.updateClusterStatus',
                    'schedule': timedelta(seconds=10),
                },
            })
Example #29
import time
from copy import copy

from kombu import Exchange

__all__ = (
    'Event',
    'event_exchange',
    'get_exchange',
    'group_from',
)

EVENT_EXCHANGE_NAME = 'celeryev'
#: Exchange used to send events on.
#: Note: Use :func:`get_exchange` instead, as the type of
#: exchange will vary depending on the broker connection.
event_exchange = Exchange(EVENT_EXCHANGE_NAME, type='topic')


def Event(type, _fields=None, __dict__=dict, __now__=time.time, **fields):
    """Create an event.

    Notes:
        An event is simply a dictionary: the only required field is ``type``.
        A ``timestamp`` field will be set to the current time if not provided.
    """
    event = __dict__(_fields, **fields) if _fields else fields
    if 'timestamp' not in event:
        event.update(timestamp=__now__(), type=type)
    else:
        event['type'] = type
    return event
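
For illustration, Event returns a plain dict and fills in a timestamp when one is not supplied:

evt = Event('task-sent', uuid='d3ad-b33f')
assert evt['type'] == 'task-sent' and 'timestamp' in evt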
Example #30
                'queue': 'comment_crawler',
                'routing_key': 'comment_info'
            }
        },
        'repost_task': {
            'task': 'tasks.repost.excute_repost_task',
            'schedule': timedelta(hours=10),
            'options': {
                'queue': 'repost_crawler',
                'routing_key': 'repost_info'
            }
        },
    },
    CELERY_QUEUES=(
        Queue('login_queue',
              exchange=Exchange('login_queue', type='direct'),
              routing_key='for_login'),
        Queue('user_crawler',
              exchange=Exchange('user_crawler', type='direct'),
              routing_key='for_user_info'),
        Queue('search_crawler',
              exchange=Exchange('search_crawler', type='direct'),
              routing_key='for_search_info'),
        Queue('fans_followers',
              exchange=Exchange('fans_followers', type='direct'),
              routing_key='for_fans_followers'),
        Queue('home_crawler',
              exchange=Exchange('home_crawler', type='direct'),
              routing_key='home_info'),
        Queue('ajax_home_crawler',
              exchange=Exchange('ajax_home_crawler', type='direct'),