Example no. 1
def get_global_is_secure(session_key):
    conf_manager = ConfManager(scc.getMgmtUri(), session_key)
    conn = conf_manager.get_stanza('aws_global_settings',
                                   'aws_connection',
                                   do_reload=False)
    return (utils.is_true(conn["is_secure"]),
            utils.is_true(conn["verify_certificates"]))
Example no. 2
def create_event_writer(config, process_safe=False):
    if scutil.is_true(config.get("use_hec")):
        return HecEventWriter(config)
    elif scutil.is_true(config.get("use_raw_hec")):
        return RawHecEventWriter(config)
    else:
        return ModinputEventWriter(process_safe=process_safe)
Example no. 3
    def _use_multiprocess(self):
        if not self._task_configs:
            return False

        single_instance = use_single_instance()
        use_process = self._task_configs[0].get(c.use_multiprocess_consumer)
        return (scutil.is_true(single_instance) and len(self._task_configs) > 1
                and scutil.is_true(use_process))
Example no. 4
def create_event_writer(config, process_safe):
    use_hec = scutil.is_true(config.get(tac.use_hec))
    use_raw_hec = scutil.is_true(config.get(tac.use_raw_hec))
    if use_hec or use_raw_hec:
        # If HEC is used, let each worker process/thread create its own event writer
        event_writer = None
    else:
        event_writer = ew.create_event_writer(config, process_safe)
        event_writer.start()
    return event_writer
Example no. 5
def get_state_store(
    meta_configs,
    appname,
    collection_name="talib_states",
    use_kv_store=False,
    use_cached_store=False,
):
    if util.is_true(use_kv_store):
        return StateStore(meta_configs, appname, collection_name)
    elif util.is_true(use_cached_store):
        return CachedFileStateStore(meta_configs, appname)
    else:
        return FileStateStore(meta_configs, appname)
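For illustration only, a hypothetical call to the get_state_store() variant above: the meta_configs keys and the app name are placeholders. The point is that flags read from .conf files usually arrive as strings, which is why they are passed through util.is_true().

# Hypothetical usage with placeholder values; "1" is coerced to True by util.is_true().
meta_configs = {
    "server_uri": "https://127.0.0.1:8089",
    "session_key": "<session-key>",
    "checkpoint_dir": "/opt/splunk/var/lib/splunk/modinputs/example",
}
store = get_state_store(meta_configs, "Splunk_TA_example", use_kv_store="1")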
Example no. 6
def create_event_writer(config, process_safe=False, use_proxy=False):
    if not use_proxy:
        config = copy.copy(config)
        config["proxy_url"] = None

    if scutil.is_true(config.get("use_hec")):
        config["http"] = sr.HttpPoolManager(config).pool()
        return HecEventWriter(config)
    elif scutil.is_true(config.get("use_raw_hec")):
        config["http"] = sr.HttpPoolManager(config).pool()
        return RawHecEventWriter(config)
    else:
        return ModinputEventWriter(process_safe=process_safe)
Example no. 7
    def _parse(self):
        self._remove_files_when_done = scutil.is_true(
            self.input_item.get('remove_files_when_done', '0'))
        self._exclude_describe_events = scutil.is_true(
            self.input_item.get('exclude_describe_events', '1'))
        blacklist = self.input_item.get('blacklist', '^(?:Describe|List|Get)')
        self._blacklist = blacklist \
            if (blacklist and self._exclude_describe_events) else None
        self._blacklist_pattern = re.compile(self._blacklist) \
            if self._blacklist is not None else None
        logger.debug('Blacklist for eventNames',
                     datainput=self.input_name,
                     regex=self._blacklist or '')
        self._excluded_events_index = \
            self.input_item.get('excluded_events_index')
Example no. 8
    def prepare(self, app, config):
        settings = config.load('aws_sqs')

        # Set Logging
        level = settings['logging']['log_level']
        set_log_level(level)

        inputs = config.load('aws_sqs_tasks')

        # If config is empty, do nothing and return.
        if not inputs:
            logger.info('No Task Configured')
            return

        logger.debug('AWS SQS Input Discover')

        # Set Proxy
        proxy = ProxySettings.load(config)
        proxy.hook_boto3_get_proxies()

        scheduler = app.create_task_scheduler(self.perform)

        # Generate Tasks
        for name, item in inputs.items():
            if scutil.is_true(item.get('disabled', '0')):
                continue
            item['datainput'] = name
            self.generate_tasks(name, item, scheduler)

        scheduler.run([app.is_aborted, config.has_expired])
        return 0
Example no. 9
def get_global_settings(server_uri,
                        session_key,
                        user="******",
                        app=ggc.splunk_ta_google):
    """
    :param: get global settings for global settings
    :return: a dict of dict which contains global settings .
    """

    url = ("{server_uri}/servicesNS/{user}/{app}/splunk_ta_google"
           "/google_settings?--get-clear-credential--=1").format(
               server_uri=server_uri, user=user, app=app)
    response, content = sr.splunkd_request(url, session_key, method="GET")
    if not response or response.status not in (200, 201):
        raise Exception("Failed to get google global settings."
                        "Check util log for more details %s" % url)
    stanzas = xdp.parse_conf_xml_dom(content)
    settings = {}
    for stanza in stanzas:
        settings[stanza[ggc.name]] = stanza

    if not utils.is_true(settings[ggc.proxy_settings].get(ggc.proxy_enabled)):
        settings[ggc.proxy_settings][ggc.proxy_url] = None

    return settings
Example no. 10
    def _get_task_configs(self, all_conf_contents, division_endpoint,
                          divide_setting):
        task_configs = list()
        orig_task_configs = all_conf_contents.get(division_endpoint)
        for (
                orig_task_config_stanza,
                orig_task_config_contents,
        ) in orig_task_configs.items():
            if util.is_true(orig_task_config_contents.get(c.disabled, False)):
                stulog.logger.debug("Stanza %s is disabled",
                                    orig_task_config_contents)
                continue
            orig_task_config_contents[c.divide_endpoint] = division_endpoint
            divide_tasks = self._divide_task_config(
                orig_task_config_stanza,
                orig_task_config_contents,
                divide_setting,
                all_conf_contents,
            )
            task_configs = task_configs + divide_tasks
        if self._stanza_name:
            for task_config in task_configs:
                if task_config[c.stanza_name] == self._stanza_name:
                    return [task_config]
        return task_configs
Example no. 11
    def load_value(self, endpoint_id, item_name, fname, fval):
        field_type = self._get_field_type(endpoint_id, item_name, fname)
        if field_type == "":
            return fval

        try:
            field_type = field_type.lower()
            if field_type == "bool":
                return True if sc_util.is_true(fval) else False
            elif field_type == "int":
                return int(fval)
            elif field_type == "json":
                try:
                    return json.loads(fval)
                except ValueError as err:
                    if err.message.startswith("Extra data:"):
                        return json.loads(self.try_fix_corrupted_json(fval, err))
                    else:
                        raise err
        except Exception as exc:
            msg = (
                'Failed to load value of "{type_name}" - '
                "endpoint={endpoint}, item={item}, field={field}"
                "".format(
                    type_name=field_type,
                    endpoint=endpoint_id,
                    item=item_name,
                    field=fname,
                )
            )
            log(msg, msgx=str(exc), level=logging.WARNING, need_tb=True)
            log("Value of failed load_value is", msgx=fval, level=logging.DEBUG)
            raise ConfigException(msg)
Example no. 12
    def __init__(self, config):
        """
        :param: config
        {
            "proxy_url": xxx,
            "proxy_port": xxx,
            "proxy_username": xxx,
            "proxy_password": xxx,
            "proxy_rdns": xxx,
            "proxy_type": xxx,
            "google_credentials": xxx,
            "google_project": xxx,
            "google_subscriptions": xxx,
            "google_topic": xxx,
            "batch_size": xxx,
            "base64encoded": True/False,
        }
        """

        self._config = config
        self._config["scopes"] = PUBSUB_SCOPES
        self._config["service_name"] = "pubsub"
        self._config["version"] = "v1"
        self._logger = logger
        self._client = gwc.create_google_client(self._config)
        self._base64encoded = scutil.is_true(self._config.get("base64encoded"))
Example no. 13
    def _gather_results(metric_name):
        tacommon.set_proxy_env(config)
        try:
            server_uri = config[tac.server_uri]
            session_key = config[tac.session_key]
            aws_account = config[tac.aws_account]
            aws_iam_role = config[tac.aws_iam_role]
            credentials = tacommon.load_credentials_from_cache(
                server_uri, session_key, aws_account, aws_iam_role)
            client = boto3.client(
                "cloudwatch",
                region_name=config[tac.region],
                aws_access_key_id=credentials.aws_access_key_id,
                aws_secret_access_key=credentials.aws_secret_access_key,
                aws_session_token=credentials.aws_session_token)

            (metrics, filtered) = list_metrics_by_metric_name(
                client, config[acc.metric_namespace], metric_name,
                dimension_regex_filters)

            if scutil.is_true(os.environ.get("cloudwatch_filter", "true")):
                metrics = filter_invalid_dimensions(
                    config[acc.metric_namespace], metrics, config)
            q.put((metrics, filtered))
        except Exception:
            logger.exception("Failed to list metric.",
                             datainput=config[tac.datainput],
                             metric_name=metric_name,
                             namespace=config[acc.metric_namespace],
                             region=config[tac.region])
Example no. 14
def get_state_store(meta_configs, appname, collection_name=None,
                    use_kv_store=False, http=None):
    if util.is_true(use_kv_store):
        http = sr.HttpPoolManager(meta_configs).pool()
        return StateStore(meta_configs, appname, collection_name, http=http)
    else:
        return FileStateStore(meta_configs, appname)
Example no. 15
def build_http_connection(config, timeout=120, disable_ssl_validation=False):
    """
    :config: dict like, proxy and account information are in the following
             format {
                 "username": xx,
                 "password": yy,
                 "proxy_url": zz,
                 "proxy_port": aa,
                 "proxy_username": bb,
                 "proxy_password": cc,
                 "proxy_type": http,http_no_tunnel,sock4,sock5,
                 "proxy_rdns": 0 or 1,
             }
    :return: Http2.Http object
    """

    proxy_type_to_code = {
        "http": socks.PROXY_TYPE_HTTP,
        "http_no_tunnel": socks.PROXY_TYPE_HTTP_NO_TUNNEL,
        "socks4": socks.PROXY_TYPE_SOCKS4,
        "socks5": socks.PROXY_TYPE_SOCKS5,
    }
    if config.get("proxy_type") in proxy_type_to_code:
        proxy_type = proxy_type_to_code[config["proxy_type"]]
    else:
        proxy_type = socks.PROXY_TYPE_HTTP

    rdns = scu.is_true(config.get("proxy_rdns"))

    proxy_info = None
    if config.get("proxy_url") and config.get("proxy_port"):
        if config.get("proxy_username") and config.get("proxy_password"):
            proxy_info = ProxyInfo(
                proxy_type=proxy_type,
                proxy_host=config["proxy_url"],
                proxy_port=int(config["proxy_port"]),
                proxy_user=config["proxy_username"],
                proxy_pass=config["proxy_password"],
                proxy_rdns=rdns,
            )
        else:
            proxy_info = ProxyInfo(
                proxy_type=proxy_type,
                proxy_host=config["proxy_url"],
                proxy_port=int(config["proxy_port"]),
                proxy_rdns=rdns,
            )
    if proxy_info:
        http = Http(
            proxy_info=proxy_info,
            timeout=timeout,
            disable_ssl_certificate_validation=disable_ssl_validation,
        )
    else:
        http = Http(timeout=timeout,
                    disable_ssl_certificate_validation=disable_ssl_validation)

    if config.get("username") and config.get("password"):
        http.add_credentials(config["username"], config["password"])
    return http
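A hypothetical invocation of build_http_connection(), for illustration only: the host names, credentials and port below are placeholders, and the request() call is just the standard httplib2 Http.request() interface on the object the function returns.

# Hypothetical usage with placeholder values.
proxy_config = {
    "username": "svc_user",
    "password": "svc_password",
    "proxy_url": "proxy.example.com",
    "proxy_port": "8080",        # conf values are often strings
    "proxy_username": "proxy_user",
    "proxy_password": "proxy_pass",
    "proxy_type": "http",
    "proxy_rdns": "1",           # coerced to True by scu.is_true()
}
http = build_http_connection(proxy_config, timeout=60)
response, content = http.request("https://api.example.com/health", method="GET")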
Example no. 16
def build_http_connection(config, timeout=120, disable_ssl_validation=False):
    """
    :config: dict like, proxy and account information are in the following
             format {
                 "username": xx,
                 "password": yy,
                 "proxy_url": zz,
                 "proxy_port": aa,
                 "proxy_username": bb,
                 "proxy_password": cc,
                 "proxy_type": http,http_no_tunnel,sock4,sock5,
                 "proxy_rdns": 0 or 1,
             }
    :return: Http2.Http object
    """

    proxy_type_to_code = {
        "http": socks.PROXY_TYPE_HTTP,
        "http_no_tunnel": socks.PROXY_TYPE_HTTP_NO_TUNNEL,
        "socks4": socks.PROXY_TYPE_SOCKS4,
        "socks5": socks.PROXY_TYPE_SOCKS5,
    }
    if config.get("proxy_type") in proxy_type_to_code:
        proxy_type = proxy_type_to_code[config["proxy_type"]]
    else:
        proxy_type = socks.PROXY_TYPE_HTTP

    rdns = scu.is_true(config.get("proxy_rdns"))

    proxy_info = None
    if config.get("proxy_url") and config.get("proxy_port"):
        if config.get("proxy_username") and config.get("proxy_password"):
            proxy_info = ProxyInfo(proxy_type=proxy_type,
                                   proxy_host=config["proxy_url"],
                                   proxy_port=int(config["proxy_port"]),
                                   proxy_user=config["proxy_username"],
                                   proxy_pass=config["proxy_password"],
                                   proxy_rdns=rdns)
        else:
            proxy_info = ProxyInfo(proxy_type=proxy_type,
                                   proxy_host=config["proxy_url"],
                                   proxy_port=int(config["proxy_port"]),
                                   proxy_rdns=rdns)
    if proxy_info:
        # Create the Http object from the httplib2shim library, a wrapper over
        # httplib2. This is needed because configuring inputs failed when a
        # proxy was enabled on the instance.
        http = httplib2shim.Http(
            proxy_info=proxy_info,
            timeout=timeout,
            disable_ssl_certificate_validation=disable_ssl_validation)
    else:
        # Create the Http object from the httplib2shim library, a wrapper over
        # httplib2. This is needed because configuring inputs failed when a
        # proxy was enabled on the instance.
        http = httplib2shim.Http(
            timeout=timeout,
            disable_ssl_certificate_validation=disable_ssl_validation)

    if config.get("username") and config.get("password"):
        http.add_credentials(config["username"], config["password"])
    return http
Example no. 17
    def _use_cache_file(self):
        # TODO Move the default value outside code
        use_cache_file = is_true(self._task_config.get(c.use_cache_file, True))
        if use_cache_file:
            stulog.logger.info(
                "Stanza=%s using cached file store to create checkpoint",
                self._task_config[c.stanza_name])
        return use_cache_file
Example no. 18
def update_or_create_hec(config):
    """
    :param config:
    {
    "server_uri": xxx,
    "session_key": xxx,
    "hec_name": xxx,
    "hec_port": xxx,
    "use_hec": 0/1,
    "use_raw_hec": 0/1,
    }
    """

    use_hec = utils.is_true(config.get("use_hec"))
    use_raw_hec = utils.is_true(config.get("use_raw_hec"))
    if not use_hec and not use_raw_hec:
        return

    hec = HECConfig(config["server_uri"], config["session_key"])

    hec_input = hec.get_http_input(config["hec_name"])
    port = config.get("hec_port", 8088)
    if not hec_input:
        logger.info("Create HEC data input")
        hec_settings = {
            "enableSSL": 1,
            "port": port,
            "output_mode": "json",
            "disabled": 0,
        }
        hec.update_settings(hec_settings)
        input_settings = {
            "name": config["hec_name"],
        }
        hec.create_http_input(input_settings)
        hec_input = hec.get_http_input(config["hec_name"])

    hostname, _ = utils.extract_hostname_port(config["server_uri"])
    hec_uri = "https://{hostname}:{port}".format(hostname=hostname, port=port)
    if hec_input:
        hec_input[0]["hec_server_uri"] = hec_uri
        return hec_input[0]
    else:
        raise Exception("Failed to get HTTP input configuration")
Example no. 19
    def get(self, stanza_name, return_acl=False):
        """
        @return: dict object if success, otherwise raise an exception
        """

        stanza = self._conf_mgr.get_stanza(self._conf_file, stanza_name,
                                           ret_metadata=return_acl)
        stanza = self._decrypt(stanza)
        stanza["disabled"] = utils.is_true(stanza.get("disabled"))
        return stanza
Example no. 20
    def get(self, stanza_name, return_acl=False):
        """
        @return: dict object if success, otherwise raise an exception
        """

        stanza = self._conf_mgr.get_stanza(self._conf_file, stanza_name,
                                           ret_metadata=return_acl)
        stanza = self._decrypt(stanza)
        stanza["disabled"] = utils.is_true(stanza.get("disabled"))
        return stanza
Example no. 21
def build_http_connection(config, timeout=120, disable_ssl_validation=False):
    """
    :config: dict like, proxy and account information are in the following
             format {
                 "username": xx,
                 "password": yy,
                 "proxy_url": zz,
                 "proxy_port": aa,
                 "proxy_username": bb,
                 "proxy_password": cc,
                 "proxy_type": http,http_no_tunnel,sock4,sock5,
                 "proxy_rdns": 0 or 1,
             }
    :return: Http2.Http object
    """

    proxy_type_to_code = {
        "http": socks.PROXY_TYPE_HTTP,
        "http_no_tunnel": socks.PROXY_TYPE_HTTP_NO_TUNNEL,
        "socks4": socks.PROXY_TYPE_SOCKS4,
        "socks5": socks.PROXY_TYPE_SOCKS5,
    }
    if config.get("proxy_type") in proxy_type_to_code:
        proxy_type = proxy_type_to_code[config["proxy_type"]]
    else:
        proxy_type = socks.PROXY_TYPE_HTTP

    rdns = scu.is_true(config.get("proxy_rdns"))

    proxy_info = None
    if config.get("proxy_url") and config.get("proxy_port"):
        if config.get("proxy_username") and config.get("proxy_password"):
            proxy_info = ProxyInfo(proxy_type=proxy_type,
                                   proxy_host=config["proxy_url"],
                                   proxy_port=int(config["proxy_port"]),
                                   proxy_user=config["proxy_username"],
                                   proxy_pass=config["proxy_password"],
                                   proxy_rdns=rdns)
        else:
            proxy_info = ProxyInfo(proxy_type=proxy_type,
                                   proxy_host=config["proxy_url"],
                                   proxy_port=int(config["proxy_port"]),
                                   proxy_rdns=rdns)
    if proxy_info:
        http = Http(proxy_info=proxy_info,
                    timeout=timeout,
                    disable_ssl_certificate_validation=disable_ssl_validation)
    else:
        http = Http(timeout=timeout,
                    disable_ssl_certificate_validation=disable_ssl_validation)

    if config.get("username") and config.get("password"):
        http.add_credentials(config["username"], config["password"])
    return http
Example no. 22
    def _build_http_connection(self):
        """
        Build connection based on rest.py
        """
        enabled = is_true(self.config.get("proxy_enabled", ""))
        if not enabled:
            if self.config.get("proxy_url"):
                del self.config['proxy_url']
            if self.config.get("proxy_port"):
                del self.config['proxy_port']
        return build_http_connection(self.config, timeout=30)
Example no. 23
def user_operate(oprt):
    """
    The detailed implementation of the oktadeactivateuser command.
    :param oprt: the HTTP operation; 'POST' is used for oktadeactivateuser.
    """
    sk = get_session_key()
    okta_conf = get_okta_server_config(sk)
    error_msg = ""
    if is_true(okta_conf.get("custom_cmd_enabled", "")):
        keywords, options = splunk.Intersplunk.getKeywordsAndOptions()
        results, dummyresults, settings = \
            splunk.Intersplunk.getOrganizedResults()
        user = options.get('userid', None)
        username = None
        if not user:
            username = options.get('username', None)
        if username:
            command = 'search source=okta:user|dedup id|search profile.login="******"|fields id'
            user = _do_spl_search(sk, command)
            if not user:
                error_msg = "The username {} doest not exist. ".format(username)
        if user:
            server_url = okta_conf.get('okta_server_url', '')
            server_token = okta_conf.get('okta_server_token', '')
            if server_url and server_token:
                client = oac.OktaRestClient(okta_conf)
                endpoint = '/api/v1/users/' + user + '/lifecycle/deactivate'
                response = client.request(endpoint, None, oprt,
                                          'okta_server_url',
                                          'okta_server_token')
                if response.get("error"):
                    if oprt == "POST":
                        error_msg = "Failed to deactivate the user {0}. The user does not exist or the user is " \
                                    "deactivated. ".format(
                            username or user)+ "Error: "+ response.get('error')[0]%response.get('error')[1:]
                else:
                    result = {}
                    result["_time"] = time.time()
                    result["user_update_status"] = "success"
                    result["detail"] = ("Deactivated the user {0} successfully.".format(
                        username or user) if oprt == "POST" else "")
                    results.append(result)
                    splunk.Intersplunk.outputResults(results)
                    _LOGGER.info(result["detail"])
        else:
            error_msg += "Missing Argument: 'userid/username' parameters is required."
    else:
        error_msg = "The custom command is not enabled. Please enable it on the setup page."
    if error_msg:
        splunk.Intersplunk.parseError(error_msg)
        _LOGGER.error(error_msg)
Example no. 24
    def _get_description_tasks(self, conf_mgr):
        stanzas = conf_mgr.all_stanzas(self.task_file, do_reload=False)

        tasks, creds = [], {}
        for stanza in stanzas:
            if scutil.is_true(stanza.get(tac.disabled)):
                continue

            # Normalize tac.account to tac.aws_account
            stanza[tac.aws_account] = stanza.get(tac.account)
            tasks.extend(self._expand_tasks(stanza, creds))
        return tasks
Example no. 25
    def _collect_scan_data(self, page_size):
        """
        Collect scan events, stopping once more than page_size events have been collected
        """
        count = 0
        self.ckpt.read()
        for (sid, ascan) in self.ckpt.contents.get(self.url,
                                                   {}).get("scans").items():
            try:
                scan_results = self.client.request(self.endpoint + '/' + sid)
                scan_results_content = scan_results.get('content', {})

                if scan_results_content is None:
                    continue

                his, last_his = self._collect_scan_history(
                    scan_results_content)
                hid = his.get('history_id') if his else None
                if hid and self.ckpt.is_new_scan(sid, hid):
                    self.ckpt.contents[self.url]["scans"][str(sid)] = {}
                    self.ckpt.contents[self.url]["scans"][str(
                        sid)]["history_id"] = hid
                    if last_his.get('status') in ('running', 'paused'):
                        self.ckpt.contents[self.url]["scans"][str(
                            sid)]["hosts"] = []
                    elif util.is_true(self.config.get("index_events_for_unsuccessful_scans", True)) or last_his.get(
                            'status') \
                            =='completed':
                        self.ckpt.contents[self.url]["scans"][str(
                            sid)]["hosts"] = [
                                ahost.get("host_id")
                                for ahost in scan_results_content.get(
                                    "hosts", [])
                            ]
                    else:
                        self.ckpt.contents[self.url]["scans"][str(
                            sid)]["hosts"] = []
                    self.ckpt.write()

                scan_info = self._collect_scan_info(scan_results_content)
                if not scan_info:
                    continue

                count += self._collect_scan_data_of_one_scan(
                    sid, scan_info, page_size)

                if count > page_size:
                    self.ckpt.write()
                    return count
            except Exception:
                _LOGGER.error(traceback.format_exc())
        self.ckpt.write()
        return count
Example no. 26
    def _handle_hec(self):
        if not self._task_configs:
            return

        use_hec = utils.is_true(self._task_configs[0].get(ggc.use_hec))
        use_raw_hec = utils.is_true(self._task_configs[0].get(ggc.use_raw_hec))
        if not use_hec and not use_raw_hec:
            return

        hec = hc.HECConfig(self.metas[ggc.server_uri],
                           self.metas[ggc.session_key])

        hec_input = hec.get_http_input("google_cloud_platform")
        port = self._task_configs[0].get(ggc.hec_port, 8088)
        if not hec_input:
            logger.info("Create HEC data input")
            hec_settings = {
                "enableSSL": 1,
                "port": port,
                "output_mode": "json",
                "disabled": 0,
            }
            hec.update_settings(hec_settings)
            input_settings = {
                "name": "google_cloud_platform",
            }
            hec.create_http_input(input_settings)
            hec_input = hec.get_http_input("google_cloud_platform")

        hostname, _ = utils.extract_hostname_port(self.metas[ggc.server_uri])
        hec_uri = "https://{hostname}:{port}".format(hostname=hostname,
                                                     port=port)
        if hec_input:
            keys = [ggc.index, ggc.name]
            for task in self._task_configs:
                with utils.save_and_restore(task, keys):
                    task.update(hec_input[0])
                    task["hec_server_uri"] = hec_uri
        else:
            raise Exception("Failed to get HTTP input configuration")
Example no. 27
def create_data_loader_mgr(config):
    """
    Create a data loader with a default event_writer and job_scheduler.
    """

    import splunktalib.event_writer as ew
    import splunktalib.schedule.scheduler as sched

    writer = ew.create_event_writer(
        config, scutil.is_true(config.get("use_multiprocess")))
    scheduler = sched.Scheduler()
    loader_mgr = DataLoaderManager(config, scheduler, writer)
    return loader_mgr
Example no. 28
def create_data_loader_mgr(config):
    """
    Create a data loader with a default event_writer and job_scheduler.
    """

    import splunktalib.event_writer as ew
    import splunktalib.schedule.scheduler as sched

    writer = ew.create_event_writer(
        config, scutil.is_true(config.get("use_multiprocess")))
    scheduler = sched.Scheduler()
    loader_mgr = DataLoaderManager(config, scheduler, writer)
    return loader_mgr
Example no. 29
    def _handle_hec(self):
        if not self._task_configs:
            return

        use_hec = utils.is_true(self._task_configs[0].get(ggc.use_hec))
        use_raw_hec = utils.is_true(self._task_configs[0].get(ggc.use_raw_hec))
        if not use_hec and not use_raw_hec:
            return

        hec = hc.HECConfig(
            self.metas[ggc.server_uri], self.metas[ggc.session_key])

        hec_input = hec.get_http_input("google_cloud_platform")
        port = self._task_configs[0].get(ggc.hec_port, 8088)
        if not hec_input:
            logger.info("Create HEC data input")
            hec_settings = {
                "enableSSL": 1,
                "port": port,
                "output_mode": "json",
            }
            hec.update_settings(hec_settings)
            input_settings = {
                "name": "google_cloud_platform",
                "description": "HTTP input for Google Cloud Platform AddOn"
            }
            hec.create_http_input(input_settings)
            hec_input = hec.get_http_input("google_cloud_platform")

        hostname, _ = utils.extract_hostname_port(
            self.metas[ggc.server_uri])
        hec_uri = "https://{hostname}:{port}".format(
            hostname=hostname, port=port)
        if hec_input:
            for task in self._task_configs:
                task.update(hec_input[0])
                task["hec_server_uri"] = hec_uri
        else:
            raise Exception("Failed to get HTTP input configuration")
Example no. 30
    def __init__(self, config, stream):
        self._key = None
        self._ckpt = None
        if scutil.is_true(config.get(tac.use_kv_store)):
            self._store = ss.get_state_store(
                config,
                config[tac.app_name],
                collection_name=aclc.cloudwatch_logs_log_ns,
                use_kv_store=True)
        else:
            self._store = ss.get_state_store(config, config[tac.app_name])

        self._pop_ckpt(config, stream)
Example no. 31
    def _get_topics(kafka_clusters):
        """
        Query and filter topic according to topic blacklist and whitelist
        :return: {
            cluster_stanza_name: {
                kafka_topic: set([topic1, topic2, ...]),
                kafka_partition: partitions,
                kafka_topic_group: topic_group,
                index: index,
            }
        }
        """

        topic_set = {}
        all_topics = {}
        kafka_clients = {}
        for cluster_stanza, cluster in kafka_clusters.iteritems():
            if utils.is_true(cluster.get(c.removed)):
                continue

            client = KafkaConfig._get_client(cluster, kafka_clients)
            topics = set(client.topic_partitions.keys())
            if not topics:
                # We treat this case as an error since the kafka-python
                # binding sometimes misbehaves, which can result in data
                # input deletion

                msg = "Didn't get any topic from Kafka brokers={}".format(
                    cluster[c.kafka_brokers])
                logger.error(msg)
                raise Exception(msg)

            try:
                topics = KafkaConfig._filter_topics(topics, cluster)
            except Exception:
                logger.error("Failed to filter topics, error=%s",
                             traceback.format_exc())
                continue

            partitions = KafkaConfig.get_partitions(cluster)
            partitions = ",".join(str(pid) for pid in partitions)

            # For now, just reporting
            KafkaConfig._handle_dup_topic_partition(cluster, topics,
                                                    partitions, topic_set)
            all_topics[cluster_stanza] = {
                c.kafka_topic: topics,
                c.kafka_partition: partitions,
                c.kafka_topic_group: cluster.get(c.kafka_topic_group),
                c.index: cluster.get(c.index),
            }
        return all_topics
Example no. 32
    def _get_kinesis_tasks(self, conf_mgr):
        proxy_info = tpc.get_proxy_info(self.metas[tac.session_key])
        stanzas = conf_mgr.all_stanzas(self.task_file, do_reload=False)

        tasks, creds = [], {}
        for stanza in stanzas:
            if scutil.is_true(stanza[tac.disabled]):
                continue
            # Normalize tac.account to tac.aws_account
            stanza[tac.aws_account] = stanza.get(tac.account)
            stanza[tac.aws_iam_role] = stanza.get(tac.aws_iam_role)
            tasks.extend(self._expand_tasks(stanza, creds, proxy_info))

        return tasks
Example no. 33
    def _get_config_rule_tasks(self, conf_mgr):
        stanzas = conf_mgr.all_stanzas(self.task_file, do_reload=False)

        tasks = []
        for stanza in stanzas:
            if scutil.is_true(stanza.get(tac.disabled)):
                continue

            stanza[tac.server_uri] = self.metas[tac.server_uri]
            stanza[tac.session_key] = self.metas[tac.session_key]
            # Normalize tac.account to tac.aws_account
            stanza[tac.aws_account] = stanza.get(tac.account)
            tasks.extend(self._expand_tasks(stanza))
        return tasks
Example no. 34
    def _remove_forwarders(self, forwarders, conf_mgr):
        for forwarder_stanza, forwarder in forwarders.iteritems():
            if utils.is_true(forwarder.get(c.removed)):
                if forwarder.get(c.appname):
                    conf_mgr.set_appname(forwarder.get(c.appname))
                else:
                    conf_mgr.set_appname(self._appname)
                try:
                    conf_mgr.delete(forwarder_stanza)
                except req.ConfNotExistsException:
                    pass
                except Exception:
                    logger.error(
                        "Failed to delete removed forwarder=%s,"
                        "error=%s", forwarder_stanza, traceback.format_exc())
Example no. 35
    def all(self, filter_disabled=False, return_acl=True):
        """
        @return: a dict of dict objects if success,
        otherwise raise an exception
        """

        results = {}
        stanzas = self._conf_mgr.all_stanzas(self._conf_file, ret_metadata=return_acl)
        for stanza in stanzas:
            stanza = self._decrypt(stanza)
            stanza["disabled"] = utils.is_true(stanza.get("disabled"))
            if filter_disabled and stanza["disabled"]:
                continue
            results[stanza["name"]] = stanza
        return results
Example no. 36
    def all(self, filter_disabled=False, return_acl=True):
        """
        @return: a dict of dict objects if success,
        otherwise raise an exception
        """

        results = {}
        stanzas = self._conf_mgr.all_stanzas(self._conf_file,
                                             ret_metadata=return_acl)
        for stanza in stanzas:
            stanza = self._decrypt(stanza)
            stanza["disabled"] = utils.is_true(stanza.get("disabled"))
            if filter_disabled and stanza["disabled"]:
                continue
            results[stanza["name"]] = stanza
        return results
Example no. 37
    def _use_multiprocess(self):
        if not self._task_configs:
            return False

        return scutil.is_true(self._task_configs[0].get(ggc.use_multiprocess))
Example no. 38
def get_state_store(meta_configs, appname, collection_name="talib_states",
                    use_kv_store=False):
    if util.is_true(use_kv_store):
        return StateStore(meta_configs, appname, collection_name)
    else:
        return FileStateStore(meta_configs, appname)