def test_good_load_local(self):
     method = simport.load("tests|"
                           "localmodule:Foo.method_a")
     import localmodule
     self.assertEqual(method, localmodule.Foo.method_a)
     self.assertEqual(localmodule.function_a,
                      simport.load("localmodule:function_a"))
Example #2
    def __init__(self):

        super(Alarming, self).__init__()

        self.events_message_queue = simport.load(cfg.CONF.messaging.driver)("events")

        self.alarm_state_transitions_message_queue = simport.load(cfg.CONF.messaging.driver)("alarm-state-transitions")
Example #3
def launch(conf, config_file="/etc/monasca/events_api.conf"):
    log.register_options(cfg.CONF)
    log.set_defaults()
    cfg.CONF(args=[],
             project='monasca_events_api',
             default_config_files=[config_file])
    log.setup(cfg.CONF, 'monasca_events_api')

    app = falcon.API()

    versions = simport.load(cfg.CONF.dispatcher.versions)()
    app.add_route("/", versions)
    app.add_route("/{version_id}", versions)

    events = simport.load(cfg.CONF.dispatcher.events)()
    app.add_route("/v2.0/events", events)
    app.add_route("/v2.0/events/{event_id}", events)

    streams = simport.load(cfg.CONF.dispatcher.stream_definitions)()
    app.add_route("/v2.0/stream-definitions/", streams)
    app.add_route("/v2.0/stream-definitions/{stream_id}", streams)

    transforms = simport.load(cfg.CONF.dispatcher.transforms)()
    app.add_route("/v2.0/transforms", transforms)
    app.add_route("/v2.0/transforms/{transform_id}", transforms)

    LOG.debug('Dispatcher drivers have been added to the routes!')
    return app
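For context, each cfg.CONF.dispatcher.* option that launch() reads is expected to contain a simport target string naming a resource class. A minimal sketch of that relationship; the module path shown is hypothetical, not taken from an actual monasca configuration:

import simport

# Hypothetical value for cfg.CONF.dispatcher.versions, as it might appear
# under a [dispatcher] section of the config file loaded above:
versions_target = "monasca_events_api.v2.reference.versions:Versions"

# launch() resolves the string to a class and instantiates one resource,
# which Falcon then dispatches matching routes to:
#     versions = simport.load(versions_target)()
#     app.add_route("/", versions)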
Example #4
 def __init__(self, alarm_ttl, config):
     self._alarm_ttl = alarm_ttl
     self._statsd = monascastatsd.Client(name='monasca',
                                         dimensions=BaseProcessor.dimensions)
     if 'database' in config and 'repo_driver' in config['database']:
         self._db_repo = simport.load(config['database']['repo_driver'])(config)
     else:
         self._db_repo = simport.load('monasca_notification.common.repositories.mysql.mysql_repo:MysqlRepo')(config)
Example #5
 def __init__(self):
     self._region = cfg.CONF.region
     self._default_authorized_roles = (
         cfg.CONF.security.default_authorized_roles)
     self._message_queue = (
         simport.load(cfg.CONF.messaging.driver)("transform-definitions"))
     self._transforms_repo = (
         simport.load(cfg.CONF.repositories.transforms)())
Example #6
    def __init__(self):

        super(Alarming, self).__init__()

        self.events_message_queue = simport.load(
            cfg.CONF.messaging.driver)('events')

        self.alarm_state_transitions_message_queue = simport.load(
            cfg.CONF.messaging.driver)('alarm-state-transitions')
Example #7
 def __init__(self):
     self._region = cfg.CONF.region
     self._default_authorized_roles = cfg.CONF.security.default_authorized_roles
     self._delegate_authorized_roles = cfg.CONF.security.delegate_authorized_roles
     self._post_events_authorized_roles = (
         cfg.CONF.security.default_authorized_roles + cfg.CONF.security.agent_authorized_roles
     )
     self._message_queue = simport.load(cfg.CONF.messaging.driver)("raw-events")
     self._events_repo = simport.load(cfg.CONF.repositories.events)()
Example #8
    def __init__(self):
        try:
            super(AlarmsStateHistory, self).__init__()
            self._region = cfg.CONF.region
            self._default_authorized_roles = (
                cfg.CONF.security.default_authorized_roles)
            self._alarms_repo = simport.load(
                cfg.CONF.repositories.alarms_driver)()
            self._metrics_repo = simport.load(
                cfg.CONF.repositories.metrics_driver)()

        except Exception as ex:
            LOG.exception(ex)
            raise exceptions.RepositoryException(ex)
Example #9
    def __init__(self):

        super(Notifications, self).__init__()

        self._region = cfg.CONF.region
        self._default_authorized_roles = cfg.CONF.security.default_authorized_roles
        self._notifications_repo = simport.load(cfg.CONF.repositories.notifications_driver)()
Example #10
 def __init__(self, **kwargs):
     super(CallbackList, self).__init__(**kwargs)
     self.callbacks = []
     self.config = kwargs
     callback_str = self.config.get('callback_list', "")
     callback_str_list = [x.strip() for x in callback_str.split(",")]
     self.callbacks = [simport.load(c)(**self.config)
                       for c in callback_str_list]
Example #11
 def __init__(self, app=None, queue_name=None):
     super(ShoeboxHandler, self).__init__(app, queue_name)
     # Don't use interpolation from ConfigParser ...
     self.config = dict(yagi.config.config.items('shoebox', raw=True))
     roll_checker_str = self.config['roll_checker']
     self.roll_checker = simport.load(roll_checker_str)(**self.config)
     self.working_directory = self.config.get('working_directory', '.')
     self.destination_folder = self.config.get('destination_folder', '.')
     for d in [self.working_directory, self.destination_folder]:
         if not os.path.isdir(d):
             os.makedirs(d)
      template = self.config.get('filename_template',
                                 'events_%Y_%m_%d_%X_%f.dat')
     cb = simport.load(self.config['callback'])(**self.config)
     self.roll_manager = roll_manager.WritingRollManager(template,
                             self.roll_checker, self.working_directory,
                             archive_callback=cb)
Example #12
def launch(conf, config_file="/etc/monasca/api-config.conf"):
    log.register_options(cfg.CONF)
    log.set_defaults()
    cfg.CONF(args=[],
             project='monasca_api',
             default_config_files=[config_file])
    log.setup(cfg.CONF, 'monasca_api')

    app = falcon.API()

    versions = simport.load(cfg.CONF.dispatcher.versions)()
    app.add_route("/", versions)
    app.add_route("/{version_id}", versions)

    metrics = simport.load(cfg.CONF.dispatcher.metrics)()
    app.add_route("/v2.0/metrics", metrics)

    metrics_measurements = simport.load(
        cfg.CONF.dispatcher.metrics_measurements)()
    app.add_route("/v2.0/metrics/measurements", metrics_measurements)

    metrics_statistics = simport.load(cfg.CONF.dispatcher.metrics_statistics)()
    app.add_route("/v2.0/metrics/statistics", metrics_statistics)

    metrics_names = simport.load(cfg.CONF.dispatcher.metrics_names)()
    app.add_route("/v2.0/metrics/names", metrics_names)

    alarm_definitions = simport.load(cfg.CONF.dispatcher.alarm_definitions)()
    app.add_route("/v2.0/alarm-definitions/", alarm_definitions)
    app.add_route("/v2.0/alarm-definitions/{alarm_definition_id}",
                  alarm_definitions)

    alarms = simport.load(cfg.CONF.dispatcher.alarms)()
    app.add_route("/v2.0/alarms", alarms)
    app.add_route("/v2.0/alarms/{alarm_id}", alarms)

    alarms_state_history = simport.load(
        cfg.CONF.dispatcher.alarms_state_history)()
    app.add_route("/v2.0/alarms/state-history", alarms_state_history)
    app.add_route("/v2.0/alarms/{alarm_id}/state-history",
                  alarms_state_history)

    notification_methods = simport.load(
        cfg.CONF.dispatcher.notification_methods)()
    app.add_route("/v2.0/notification-methods", notification_methods)
    app.add_route("/v2.0/notification-methods/{notification_method_id}",
                  notification_methods)

    LOG.debug('Dispatcher drivers have been added to the routes!')
    return app
Example #14
    def __init__(self):

        super(Notifications, self).__init__()

        self._region = cfg.CONF.region
        self._default_authorized_roles = (
            cfg.CONF.security.default_authorized_roles)
        self._notifications_repo = simport.load(
            cfg.CONF.repositories.notifications_driver)()
Example #15
    def __init__(self):
        try:
            super(Metrics, self).__init__()
            self._region = cfg.CONF.region
            self._default_authorized_roles = (
                cfg.CONF.security.default_authorized_roles)
            self._delegate_authorized_roles = (
                cfg.CONF.security.delegate_authorized_roles)
            self._post_metrics_authorized_roles = (
                cfg.CONF.security.default_authorized_roles +
                cfg.CONF.security.agent_authorized_roles)
            self._message_queue = simport.load(cfg.CONF.messaging.driver)(
                'metrics')
            self._metrics_repo = simport.load(
                cfg.CONF.repositories.metrics_driver)()

        except Exception as ex:
            LOG.exception(ex)
            raise falcon.HTTPInternalServerError('Service unavailable',
                                                 ex.message)
Example #16
    def __init__(self):
        try:
            super(Metrics, self).__init__()
            self._region = cfg.CONF.region
            self._default_authorized_roles = (
                cfg.CONF.security.default_authorized_roles)
            self._delegate_authorized_roles = (
                cfg.CONF.security.delegate_authorized_roles)
            self._post_metrics_authorized_roles = (
                cfg.CONF.security.default_authorized_roles +
                cfg.CONF.security.agent_authorized_roles)
            self._message_queue = simport.load(
                cfg.CONF.messaging.driver)('metrics')
            self._metrics_repo = simport.load(
                cfg.CONF.repositories.metrics_driver)()

        except Exception as ex:
            LOG.exception(ex)
            raise falcon.HTTPInternalServerError('Service unavailable',
                                                 ex.message)
Example #17
    def __init__(self, app=None, queue_name=None):
        super(ShoeboxHandler, self).__init__(app, queue_name)
        # Don't use interpolation from ConfigParser ...
        self.config = dict(yagi.config.config.items('shoebox', raw=True))
        roll_checker_str = self.config.get('roll_checker')
        self.roll_checker = None
        if roll_checker_str:
            self.roll_checker = simport.load(roll_checker_str)(**self.config)
        self.working_directory = self.config.get('working_directory', '.')
        self.destination_folder = self.config.get('destination_folder', '.')
        for d in [self.working_directory, self.destination_folder]:
            if not os.path.isdir(d):
                os.makedirs(d)
        template = self.config.get('filename_template',
                                   'events_%Y_%m_%d_%X_%f.dat')
        callback_str = self.config.get('callback')
        cb = None
        if callback_str:
            cb = simport.load(callback_str)(**self.config)

        roll_manager_str = self.config.get(
            'roll_manager', 'shoebox.roll_manager:WritingRollManager')

        self.wrap_payload_with_region = self.config.get(
            'wrap_payload_with_region', 'False') == 'True'

        self.region = self.config.get('wrap_region', 'n/a')
        self.cell = self.config.get('wrap_cell', 'n/a')

        # Hack(sandy): These sorts of parameters should be left to the
        # callback handlers. Just need it here to get over the hump.
        # Needs to be refactored.
        roll_size_mb = self.config.get('roll_size_mb', 1000)

        self.roll_manager = simport.load(roll_manager_str)(
            template,
            self.roll_checker,
            directory=self.working_directory,
            destination_directory=self.destination_folder,
            archive_callback=cb,
            roll_size_mb=roll_size_mb)
Example #18
    def __init__(self):

        try:
            self._region = cfg.CONF.region

            self._default_authorized_roles = (
                cfg.CONF.security.default_authorized_roles)
            self._delegate_authorized_roles = (
                cfg.CONF.security.delegate_authorized_roles)
            self._post_authorized_roles = (
                cfg.CONF.security.default_authorized_roles +
                cfg.CONF.security.agent_authorized_roles)

            self._stream_definitions_repo = (
                simport.load(cfg.CONF.repositories.streams)())
            self.stream_definition_event_message_queue = (
                simport.load(cfg.CONF.messaging.driver)('stream-definitions'))

        except Exception as ex:
            LOG.exception(ex)
            raise exceptions.RepositoryException(ex)
Example #19
    def test_good_load_external(self):
        method = simport.load("tests/external|"
                              "external.externalmodule:Blah.method_b")

        self.assertTrue('external.externalmodule' in sys.modules)
        old = sys.modules['external.externalmodule']
        import external.externalmodule

        self.assertEqual(external.externalmodule,
                         sys.modules['external.externalmodule'])
        self.assertEqual(old, external.externalmodule)
        self.assertEqual(method, external.externalmodule.Blah.method_b)
Example #20
    def __init__(self, app=None, queue_name=None):
        super(ShoeboxHandler, self).__init__(app, queue_name)
        # Don't use interpolation from ConfigParser ...
        self.config = dict(yagi.config.config.items('shoebox', raw=True))
        roll_checker_str = self.config.get('roll_checker')
        self.roll_checker = None
        if roll_checker_str:
            self.roll_checker = simport.load(roll_checker_str)(**self.config)
        self.working_directory = self.config.get('working_directory', '.')
        self.destination_folder = self.config.get('destination_folder', '.')
        for d in [self.working_directory, self.destination_folder]:
            if not os.path.isdir(d):
                os.makedirs(d)
        template = self.config.get('filename_template',
                                   'events_%Y_%m_%d_%X_%f.dat')
        callback_str = self.config.get('callback')
        cb = None
        if callback_str:
            cb = simport.load(callback_str)(**self.config)

        roll_manager_str = self.config.get('roll_manager',
                                    'shoebox.roll_manager:WritingRollManager')

        self.wrap_payload_with_region = self.config.get(
                            'wrap_payload_with_region', 'False') == 'True'

        self.region = self.config.get('wrap_region', 'n/a')
        self.cell = self.config.get('wrap_cell', 'n/a')

        # Hack(sandy): These sorts of parameters should be left to the
        # callback handlers. Just need it here to get over the hump.
        # Needs to be refactored.
        roll_size_mb = self.config.get('roll_size_mb', 1000)

        self.roll_manager = simport.load(roll_manager_str)(template,
                        self.roll_checker, directory=self.working_directory,
                        destination_directory=self.destination_folder,
                        archive_callback=cb, roll_size_mb=roll_size_mb)
Example #21
    def get_kafka_stream(topic, streaming_context):
        offset_specifications = simport.load(cfg.CONF.repositories.offsets)()
        app_name = streaming_context.sparkContext.appName
        saved_offset_spec = offset_specifications.get_kafka_offsets(app_name)
        if len(saved_offset_spec) < 1:

            MonMetricsKafkaProcessor.log_debug(
                "No saved offsets available..."
                "connecting to kafka without specifying offsets")
            kvs = KafkaUtils.createDirectStream(
                streaming_context, [topic],
                {"metadata.broker.list": cfg.CONF.messaging.brokers})

            return kvs

        else:
            from_offsets = {}
            for key, value in saved_offset_spec.items():
                if key.startswith("%s_%s" % (app_name, topic)):
                    # spec_app_name = value.get_app_name()
                    spec_topic = value.get_topic()
                    spec_partition = int(value.get_partition())
                    # spec_from_offset = value.get_from_offset()
                    spec_until_offset = value.get_until_offset()
                    # composite_key = "%s_%s_%s" % (spec_app_name,
                    #                               spec_topic,
                    #                               spec_partition)
                    # partition = saved_offset_spec[composite_key]
                    from_offsets[
                        TopicAndPartition(spec_topic, spec_partition)
                    ] = long(spec_until_offset)

            MonMetricsKafkaProcessor.log_debug(
                "get_kafka_stream: calling createDirectStream :"
                " topic:{%s} : start " % topic)
            for key, value in from_offsets.items():
                MonMetricsKafkaProcessor.log_debug(
                    "get_kafka_stream: calling createDirectStream : "
                    "offsets : TopicAndPartition:{%s,%s}, value:{%s}" %
                    (str(key._topic), str(key._partition), str(value)))
            MonMetricsKafkaProcessor.log_debug(
                "get_kafka_stream: calling createDirectStream : "
                "topic:{%s} : done" % topic)

            kvs = KafkaUtils.createDirectStream(
                streaming_context, [topic],
                {"metadata.broker.list": cfg.CONF.messaging.brokers},
                from_offsets)
            return kvs
Example #22
 def _load_plugins(cls, plug_map, defaults=None):
     plugins = dict()
     if defaults is not None:
         plugins.update(defaults)
     for name, cls_string in plug_map.items():
         try:
             plugins[name] = simport.load(cls_string)
         except simport.ImportFailed as e:
             log.error("Could not load plugin %s: Import failed. %s" %
                       (name, e))
         except (simport.MissingMethodOrFunction, simport.MissingModule,
                 simport.BadDirectory) as e:
             log.error("Could not load plugin %s: Not found. %s" %
                       (name, e))
     return plugins
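_load_plugins() maps plugin names to simport target strings and skips any entry that fails to load. A minimal usage sketch under that assumption; the plugin map and names below are hypothetical, with a stdlib target so the snippet stays self-contained:

import simport

# Hypothetical plugin map: name -> "module:class_or_function" target string.
plug_map = {
    "json_dump": "json:dumps",
    # An entry whose module cannot be imported would be caught and skipped below.
}

plugins = {}
for name, cls_string in plug_map.items():
    try:
        plugins[name] = simport.load(cls_string)
    except (simport.ImportFailed, simport.MissingMethodOrFunction,
            simport.MissingModule, simport.BadDirectory):
        pass  # the real helper logs the failure instead of silently skipping

print(plugins)  # {'json_dump': <function dumps ...>}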
Example #23
    def save_kafka_offsets(current_offsets, app_name,
                           batch_time_info):
        """save current offsets to offset specification."""

        offset_specs = simport.load(cfg.CONF.repositories.offsets)()

        for o in current_offsets:
            MonMetricsKafkaProcessor.log_debug(
                "saving: OffSetRanges: %s %s %s %s, "
                "batch_time_info: %s" % (
                    o.topic, o.partition, o.fromOffset, o.untilOffset,
                    str(batch_time_info)))
        # add new offsets, update revision
        offset_specs.add_all_offsets(app_name,
                                     current_offsets,
                                     batch_time_info)
Example #24
 def _load_plugins(cls, plug_map, defaults=None):
     plugins = dict()
     if defaults is not None:
         plugins.update(defaults)
     for name, cls_string in plug_map.items():
         try:
             plugins[name] = simport.load(cls_string)
         except simport.ImportFailed as e:
             log.error("Could not load plugin %s: Import failed. %s" % (
                       name, e))
         except (simport.MissingMethodOrFunction,
                 simport.MissingModule,
                 simport.BadDirectory) as e:
             log.error("Could not load plugin %s: Not found. %s" % (
                       name, e))
     return plugins
Example #25
    def get_processing_offset_range_list(processing_time):
        """get offset range to fetch data from. The
        range will last from the last saved offsets to current offsets
        available. If there are no last saved offsets available in the
        database the starting offsets will be set to the earliest
        available in kafka.
        """

        offset_specifications = simport.load(cfg.CONF.repositories.offsets)()

        # get application name, will be used to get offsets from database
        app_name = PreHourlyProcessor.get_app_name()

        saved_offset_spec = offset_specifications.get_kafka_offsets(app_name)

        # get kafka topic to fetch data
        topic = PreHourlyProcessor.get_kafka_topic()

        offset_range_list = []
        if len(saved_offset_spec) < 1:

            PreHourlyProcessor.log_debug(
                "No saved offsets available..."
                "connecting to kafka and fetching "
                "from earliest available offset ...")

            offset_range_list = PreHourlyProcessor._get_new_offset_range_list(
                cfg.CONF.messaging.brokers,
                topic)
        else:
            PreHourlyProcessor.log_debug(
                "Saved offsets available..."
                "connecting to kafka and fetching from saved offset ...")

            offset_range_list = PreHourlyProcessor._get_offset_range_list(
                cfg.CONF.messaging.brokers,
                topic,
                app_name,
                saved_offset_spec)
        return offset_range_list
Example #26
File: routes.py Project: l4p4/surl
    def __init__(self):
        _refs = util.load_references()
        '''base'''
        _base = simport.load(_refs['api_v1_base'])()
        '''managers'''
        _manager_user = simport.load(_refs['api_v1_manager_user'])()
        _manager_url = simport.load(_refs['api_v1_manager_url'])()
        '''stats'''
        _system_stats = simport.load(_refs['api_v1_system_stats'])()
        _user_stats = simport.load(_refs['api_v1_user_stats'])()
        _url_stats = simport.load(_refs['api_v1_url_stats'])()

        self.map_resources = {
            "/": _base,
            '/users': _manager_user,
            '/user/{userId}': _manager_user,
            '/urls/{id}': _manager_url,
            '/users/{userId}/urls': _manager_url,
            '/stats': _system_stats,
            '/stats/{id}': _url_stats,
            '/users/{userId}/stats': _user_stats
        }
Example #27
def launch(conf, config_file="/etc/monasca/api-config.conf"):
    log.register_options(cfg.CONF)
    log.set_defaults()
    cfg.CONF(args=[],
             project='monasca_api',
             default_config_files=[config_file])
    log.setup(cfg.CONF, 'monasca_api')

    app = falcon.API()

    versions = simport.load(cfg.CONF.dispatcher.versions)()
    app.add_route("/", versions)
    app.add_route("/{version_id}", versions)

    # The following resource is a workaround for a regression in falcon 0.3
    # which causes the path '/v2.0' to not route to the versions resource
    version_2_0 = simport.load(cfg.CONF.dispatcher.version_2_0)()
    app.add_route("/v2.0", version_2_0)

    metrics = simport.load(cfg.CONF.dispatcher.metrics)()
    app.add_route("/v2.0/metrics", metrics)

    metrics_measurements = simport.load(
        cfg.CONF.dispatcher.metrics_measurements)()
    app.add_route("/v2.0/metrics/measurements", metrics_measurements)

    metrics_statistics = simport.load(cfg.CONF.dispatcher.metrics_statistics)()
    app.add_route("/v2.0/metrics/statistics", metrics_statistics)

    metrics_names = simport.load(cfg.CONF.dispatcher.metrics_names)()
    app.add_route("/v2.0/metrics/names", metrics_names)

    alarm_definitions = simport.load(cfg.CONF.dispatcher.alarm_definitions)()
    app.add_route("/v2.0/alarm-definitions/", alarm_definitions)
    app.add_route("/v2.0/alarm-definitions/{alarm_definition_id}",
                  alarm_definitions)

    alarms = simport.load(cfg.CONF.dispatcher.alarms)()
    app.add_route("/v2.0/alarms", alarms)
    app.add_route("/v2.0/alarms/{alarm_id}", alarms)

    alarm_count = simport.load(cfg.CONF.dispatcher.alarms_count)()
    app.add_route("/v2.0/alarms/count/", alarm_count)

    alarms_state_history = simport.load(
        cfg.CONF.dispatcher.alarms_state_history)()
    app.add_route("/v2.0/alarms/state-history", alarms_state_history)
    app.add_route("/v2.0/alarms/{alarm_id}/state-history",
                  alarms_state_history)

    notification_methods = simport.load(
        cfg.CONF.dispatcher.notification_methods)()
    app.add_route("/v2.0/notification-methods", notification_methods)
    app.add_route("/v2.0/notification-methods/{notification_method_id}",
                  notification_methods)

    LOG.debug('Dispatcher drivers have been added to the routes!')
    return app
Example #28
 def init():
     # object to keep track of offsets
     KafkaMessageAdapterPreHourly.adapter_impl = simport.load(
         cfg.CONF.messaging.adapter_pre_hourly)()
Example #30
    def test_import_class(self):
        klass = simport.load("tests/external|"
                             "external.externalmodule:Blah")
        import external.externalmodule

        self.assertEqual(klass, external.externalmodule.Blah)
Example #31
 def init():
     # object to keep track of offsets
     DummyAdapter.adapter_impl = simport.load(
         "tests.unit.messaging.adapter:DummyAdapter")()
Example #32
 def test_good_load_internal(self):
     self.assertEqual(dummy_function,
                      simport.load("test_simport:dummy_function"))
     self.assertEqual(DummyClass.method_a,
                      simport.load("test_simport:DummyClass.method_a"))
Example #33
 def test_local_class(self):
     klass = simport.load("LocalClass", __name__)
     self.assertEqual(klass, LocalClass)
Example #34
def load_healthcheck_resource(app):
    healthchecks = simport.load(CONF.dispatcher.healthchecks)()
    app.add_route(uri_map.HEALTHCHECK_URI, healthchecks)
Example #35
def load_logs_resource(app):
    logs = simport.load(CONF.dispatcher.logs)()
    app.add_route(uri_map.V2_LOGS_URI, logs)

    logs_v3 = simport.load(CONF.dispatcher.logs_v3)()
    app.add_route(uri_map.V3_LOGS_URI, logs_v3)
Example #36
def load_versions_resource(app):
    versions = simport.load(CONF.dispatcher.versions)()
    app.add_route("/", versions)
    app.add_route("/{version_id}", versions)
Example #37
def load_logs_resource(app):
    logs = simport.load(CONF.dispatcher.logs)()
    app.add_route('/v2.0/log/single', logs)
Example #38
 def reset_kafka_offsets(app_name):
     """delete all offsets from the offset specification."""
     # get the offsets from global var
     offset_specs = simport.load(cfg.CONF.repositories.offsets)()
     offset_specs.delete_all_kafka_offsets(app_name)