Example #1
    def __init__(self, meta_configs, stanza_configs):
        self._meta_configs = meta_configs
        self._stanza_configs = stanza_configs
        self._task_configs = []
        self._kafka_topics = None
        self._server_info = sc.ServerInfo(self._meta_configs[c.server_uri],
                                          self._meta_configs[c.session_key])
        setup_signal_handler(None, self)
        self._dispatch_errors = None
        self._servername, self._port = extract_hostname_port(
            self._meta_configs[c.server_uri])

        self._store = create_state_store(self._meta_configs, self._appname,
                                         self._server_info)

        conf_mgr = cm.ConfManager(self._meta_configs[c.server_uri],
                                  self._meta_configs[c.session_key])

        global_settings = conf_mgr.all_stanzas_as_dicts(c.myta_conf,
                                                        do_reload=True)
        kafka_clusters = self._get_kafka_clusters(conf_mgr, global_settings)

        log.Logs().set_level(global_settings[c.global_settings].get(
            c.log_level, "INFO"))
        if kafka_clusters:
            self._retry_dispatch_tasks(global_settings, kafka_clusters)
            # Refresh kafka clusters
            kafka_clusters = self._get_kafka_clusters(conf_mgr,
                                                      global_settings)
            self._kafka_clusters = kafka_clusters
            self._get_task_configs(global_settings, kafka_clusters)
        else:
            logger.info("No Kafka cluster are configured")
Example #2
    def __init__(self, meta_config, client_schema):
        self._meta_config = meta_config
        self._task_configs = []
        self._client_schema = client_schema
        self._server_info = sc.ServerInfo(meta_config[c.server_uri],
                                          meta_config[c.session_key])
        self._all_conf_contents = {}
        self._get_division_settings = {}
        self._load_task_configs()
        self._log_level = self._get_log_level()
Example #3
    def __init__(self, meta_config, client_schema, log_suffix=None,
                 stanza_name=None, input_type=None,
                 single_instance=True):
        self._meta_config = meta_config
        self._stanza_name = stanza_name
        self._input_type = input_type
        self._log_suffix = log_suffix
        self._single_instance = single_instance
        self._task_configs = []
        self._client_schema = client_schema
        self._server_info = sc.ServerInfo(meta_config[c.server_uri],
                                          meta_config[c.session_key])
        self._all_conf_contents = {}
        self._get_division_settings = {}
        self.set_logging()
        self._load_task_configs()
Example #4
    def __init__(self, meta_config, client_schema, stanza_name=None, log_suffix=None):
        self._meta_config = meta_config
        self._stanza_name = stanza_name
        self._log_suffix = log_suffix
        if self._stanza_name and self._log_suffix:
            stulog.reset_logger(
                self._log_suffix
                + "_"
                + th.format_input_name_for_file(self._stanza_name)
            )
            stulog.logger.info("Start {} task".format(self._stanza_name))
        self._task_configs = []
        self._client_schema = client_schema
        self._server_info = sc.ServerInfo(
            meta_config[c.server_uri], meta_config[c.session_key]
        )
        self._all_conf_contents = {}
        self._get_division_settings = {}
        self._load_task_configs()
        self._log_level = self._get_log_level()
Example #5
            def __iter__(self):
                for c in self.creds:
                    if c.realm.startswith('__REST_CREDENTIAL__#'):
                        continue

                    yield AwsAccessKey(c.username, c.clear_password, c.realm)

                try:
                    requests.get('http://169.254.169.254/latest/meta-data/', timeout=2)
                except IOError:
                    logger.debug('Not running on EC2 instance, skip instance role discovery.')
                    return  # end the generator (PEP 479: don't raise StopIteration inside a generator)

                server_info = sc.ServerInfo(scc.getMgmtUri(), self._session_key)
                if not server_info.is_cloud_instance():
                    cred = get_ec2_iam_role_creds()
                    if cred:
                        yield AwsAccessKey(
                            cred["AccessKeyId"], cred["SecretAccessKey"],
                            cred["Name"], cred["RegionCategory"],
                            cred["Token"], cred["AccountId"], 1)
Example #6
    def is_dispatcher(self, server_info=None, forwarders=None):
        if not forwarders:
            conf_mgr = tcm.TAConfManager(c.myta_forwarder_conf,
                                         self._meta_configs[c.server_uri],
                                         self._meta_configs[c.session_key],
                                         appname=self._appname)
            forwarders = conf_mgr.all(return_acl=False)

        if not forwarders:
            return False

        if not server_info:
            server_info = sc.ServerInfo(self._meta_configs[c.server_uri],
                                        self._meta_configs[c.session_key])

        if server_info.is_shc_member():
            # In SHC env, only captain is able to dispatch the tasks
            if not server_info.is_captain():
                logger.info("This SH is not captain, ignore task dispatching")
                return False
        return True
Example #7
def run(ucc_setting):
    """
    Splunk TA Modular Input entry.
    """
    # Clean http proxy environ
    utils.remove_http_proxy_env_vars()

    # Set log level
    stulog.set_log_level("INFO")

    meta_configs, _ = modinput.get_modinput_configs_from_stdin()
    server_info = sc.ServerInfo(meta_configs["server_uri"],
                                meta_configs["session_key"])

    if server_info.is_shc_member() and not server_info.is_captain():
        # In SHC env, only captain is able to refresh all tokens
        stulog.logger.info("message=\"{title} will exit\" "
                           "detail_info=\"This search header is not captain\""
                           .format(title=ucc_setting["title"]))
        return

    app_name = ssp.get_appname_from_path(
        os.path.abspath(_get_modular_input_file()))
    if not app_name:
        raise UCCServerException("app_name is None.")

    ucc_config = UCCConfig(splunkd_uri=meta_configs["server_uri"],
                           session_key=meta_configs["session_key"],
                           schema=json.dumps(ucc_setting["config"]),
                           user="******",
                           app=app_name)
    ucc_config_loader = UCCServerConfigLoader(
        ucc_config,
        id_locator=ucc_setting["meta.id"],
        logging_locator=ucc_setting["meta.logging"],
        local_forwarder_locator=ucc_setting["meta.local_forwarder"],
        forwarders_snapshot_locator=ucc_setting["meta.forwarder_snapshot"],
        dispatch_snapshot_locator=ucc_setting["meta.dispatch_snapshot"])

    ucc_server_id = ucc_config_loader.get_ucc_server_id(create_if_empty=False)
    if not ucc_server_id and not server_info.is_shc_member():
        ucc_config_loader.enable_local_forwarder()

    # force to get again
    ucc_server_id = ucc_config_loader.get_ucc_server_id(create_if_empty=True)
    ucc_server = UCCServer(
        meta_configs["server_uri"],
        meta_configs["session_key"],
        ucc_config_loader.load_ucc_server_input,
        [_get_local_conf_dir() + _file for _file in
         ucc_setting["monitor_file"]],
        ucc_setting["filter"],
        ucc_setting["division"],
        ucc_setting["dispatch"],
        ucc_config_loader.get_forwarders_snapshot,
        ucc_config_loader.update_forwarders_snapshot,
        ucc_config_loader.get_dispatch_snapshot,
        ucc_config_loader.update_dispatch_snapshot,
        ucc_server_id,
        get_log_level_callback=ucc_config_loader.get_ucc_server_log_level)
    # Setup signal handler
    _setup_signal(ucc_server, ucc_setting)
    # Start ucc server
    ucc_server.start()
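
All of the examples build sc.ServerInfo from the modular input's server_uri and session_key; Examples #5-#7 then branch on properties such as is_cloud_instance(), is_shc_member() and is_captain(). A minimal sketch of that shared pattern is below; the helper name should_dispatch and the way the sc module is passed in are illustrative assumptions, not code from any of the add-ons above.

def should_dispatch(sc, meta_configs):
    """Hypothetical helper: return True only on a node allowed to dispatch tasks.

    "sc" is the same server-info module the examples import; "meta_configs"
    carries the modular input's server_uri and session_key.
    """
    server_info = sc.ServerInfo(meta_configs["server_uri"],
                                meta_configs["session_key"])
    # In a search head cluster, only the captain dispatches (see Examples #6 and #7).
    if server_info.is_shc_member() and not server_info.is_captain():
        return False
    return True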