import os.path as op
import traceback

from splunktalib.common import log

logger = log.Logs().get_logger("util")


class FileMonitor(object):
    """Polls a set of files for modification-time changes and invokes a
    callback when any of them have changed.

    NOTE(review): SOURCE is truncated inside check_changes(); the text
    below stops exactly where the original stops.
    """

    def __init__(self, callback, files):
        """
        :callback: callable invoked when monitored files change
        :files: files to be monitored with full path
        """
        self._callback = callback
        self._files = files

        # Seed the known mtime per file; None means "not readable yet".
        self.file_mtimes = {file_name: None for file_name in self._files}
        for k in self.file_mtimes:
            try:
                self.file_mtimes[k] = op.getmtime(k)
            except OSError:
                # File may not exist yet -- best effort, keep the None marker.
                logger.debug("Getmtime for %s, failed: %s",
                             k, traceback.format_exc())

    def check_changes(self):
        logger.debug("Checking files=%s", self._files)
        file_mtimes = self.file_mtimes
        changed_files = []
        # Python 2 dict iteration; compares stored mtime against current.
        for f, last_mtime in file_mtimes.iteritems():
            try:
import hashlib
import copy

import splunktalib.conf_manager.conf_manager as cm
import splunktalib.state_store as ss

import taaws.ta_aws_consts as tac
import s3_mod.aws_s3_consts as asc
from splunktalib.common import log

logger = log.Logs().get_logger(asc.s3_log)


def get_key_ckpt_key(bucket_name, key_name):
    """Return a stable checkpoint identifier for one (bucket, key) pair.

    MD5 is used here only as a compact deterministic key, not for security.
    """
    return hashlib.md5("{bucket_name}${key_name}".format(
        bucket_name=bucket_name, key_name=key_name)).hexdigest()


def create_state_store(config):
    """Create the state store backing checkpoints for this data input.

    Collection is namespaced per data input ("aws_s3_<input>"); KV store
    usage is controlled by the config flag.
    """
    store = ss.get_state_store(
        config,
        config[tac.app_name],
        collection_name="aws_s3_" + config[asc.data_input],
        use_kv_store=config.get(tac.use_kv_store))
    return store


class S3IndexCheckpointer(object):
    # NOTE(review): SOURCE is truncated inside __init__; the remainder of
    # this class is not visible here.

    def __init__(self, config, new=True):
        self._config = config
        self._store = create_state_store(config)
        # One checkpoint document per data input.
        self._ckpt_key = "{}.ckpt".format(config[asc.data_input])
import os.path as op

import splunktalib.common.util as utils
import splunktalib.modinput as modinput
import splunktalib.file_monitor as fm
import splunktalib.orphan_process_monitor as opm
from splunktalib.common import log

import kafka_config as kc
import kafka_consts as c
import kafka_concurrent_data_loader as kcdl

# Module-import side effects: scrub proxy env vars and unbuffer stdout so
# the modinput XML reaches splunkd promptly.
utils.remove_http_proxy_env_vars()
utils.disable_stdout_buffer()

logger = log.Logs().get_logger("main")


def do_scheme():
    """
    Feed splunkd the TA's scheme
    """
    desc = ("Enable modular inputs to collect Kafka topic data. Use only if "
            "managing inputs manually from individual forwarders. "
            "See documentation for details.")

    # Python 2 print statement; SOURCE is truncated inside the scheme XML.
    print """
    <scheme>
    <title>Splunk Add-on for {}</title>
    <description>{}</description>
    <use_external_validation>true</use_external_validation>
import urllib
import json
from traceback import format_exc
import sys
import os.path as op

import common.util as util

# Make the app's package root importable before pulling in splunktalib.
sys.path.insert(0, op.dirname(op.dirname(op.abspath(__file__))))

import splunktalib.common.util as scu
import splunktalib.common.log as log

_LOGGER = log.Logs().get_logger("ta_util_rest")

from httplib2 import (socks, ProxyInfo, Http)


def splunkd_request(splunkd_uri, session_key, method="GET",
                    headers=None, data=None, timeout=30, retry=1):
    """Issue an authenticated REST request against splunkd.

    NOTE(review): SOURCE is truncated inside this function.

    @return: httplib2.Response and content
    """
    headers = headers if headers is not None else {}
    # splunkd expects the session key in a "Splunk <key>" Authorization header.
    headers["Authorization"] = "Splunk {0}".format(session_key)
    content_type = headers.get("Content-Type")
import splunk.admin as admin
import splunk.clilib.cli_common as scc
from splunktalib import credentials as cred
from splunktalib.conf_manager import conf_manager as conf
from splunktalib.common import log
from splunktalib.common import util
import logging
import traceback

logger = log.Logs().get_logger('ta_okta', level=logging.DEBUG)

util.remove_http_proxy_env_vars()

"""
Copyright (C) 2005 - 2015 Splunk Inc. All Rights Reserved.

Description:
This skeleton python script handles the parameters in the configuration page.

    handleList method: lists configurable parameters in the configuration page
    corresponds to handleractions = list in restmap.conf

    handleEdit method: controls the parameters and saves the values
    corresponds to handleractions = edit in restmap.conf
"""


class ConfigApp(admin.MConfigHandler):
    # Mask shown in place of stored secrets when listing configuration.
    encrypted = "********"

    """
    Set up supported arguments
    """
    # NOTE(review): SOURCE is truncated at the setup() signature.
    def setup(self):
import logging

from splunktalib.common import log

import okta_command_common as occ

_LOGGER = log.Logs().get_logger("ta_okta", level=logging.DEBUG)


def add_member():
    """Entry point: add a user to an Okta group.

    Logs the invocation, then delegates the actual group-membership
    operation to okta_command_common.member_operate using the HTTP
    PUT verb.
    """
    _LOGGER.info("call add_member()")
    occ.member_operate("PUT")


if __name__ == "__main__":
    add_member()
""" import sys import os.path as op sys.path.insert(0, op.join(op.dirname(op.abspath(__file__)), "splunktalib")) import logging from datetime import datetime import traceback import nessus_data_collector as ndc import nessus_config import re from splunktalib.common import log from splunktalib.common import util _LOGGER = log.Logs().get_logger("ta_nessus", level=logging.DEBUG) util.remove_http_proxy_env_vars() def do_scheme(): """ Feed splunkd the TA's scheme """ print """ <scheme> <title>Splunk Add-on for Nessus</title> <description>Splunk Add-on for Nessus</description> <use_external_validation>true</use_external_validation> <streaming_mode>xml</streaming_mode> <use_single_instance>False</use_single_instance>
import splunktalib.rest as rest
from splunktalib.common import log

_LOGGER = log.Logs().get_logger("ta_util_conf_manager")


def _content_request(uri, session_key, method, payload, err_msg):
    """Issue a splunkd REST request and return the response body on success.

    :param uri: full splunkd endpoint URI
    :param session_key: splunkd session key used for authentication
    :param method: HTTP method, e.g. "GET"/"POST"/"DELETE"
    :param payload: request body forwarded to rest.splunkd_request
    :param err_msg: message logged when splunkd returns a non-2xx status
    :return: response content on HTTP 200/201, otherwise None
    """
    resp, content = rest.splunkd_request(uri, session_key, method,
                                         data=payload, retry=3)
    # BUGFIX: the transport layer signals total failure with resp=None.
    # The original test `resp is None and content is None` would fall
    # through to `resp.status` and raise AttributeError if resp alone
    # were None; guard on resp directly.
    if resp is None:
        return None

    if resp.status in (200, 201):
        return content

    _LOGGER.error("%s, reason=%s", err_msg, resp.reason)
    return None
import os
import sys
import subprocess
import ConfigParser
import os.path as op
import traceback

from splunktalib.common import log
from splunktalib.common import util
import splunktalib.credentials as cred
import splunktalib.rest as rest
import splunktalib.common.xml_dom_parser as xdp

_LOGGER = log.Logs().get_logger("ta_util")


def _parse_modinput_configs(root, outer_block, inner_block):
    # NOTE(review): SOURCE is truncated inside this function's docstring;
    # the reconstruction below stops where the original text stops.
    """
    When user splunkd spawns modinput script to do config check or run

    <?xml version="1.0" encoding="UTF-8"?>
    <input>
      <server_host>localhost.localdomain</server_host>
      <server_uri>https://127.0.0.1:8089</server_uri>
      <session_key>xxxyyyzzz</session_key>
      <checkpoint_dir>ckpt_dir</checkpoint_dir>
      <configuration>
        <stanza name="snow://alm_asset">
          <param name="duration">60</param>
    handleEdit method: controls the parameters and saves the values
    corresponds to handleractions = edit in restmap.conf
"""

import json

import splunk.clilib.cli_common as scc
import splunk.admin as admin

import splunktalib.common.util as utils
import splunktalib.common.log as log
from splunktalib.conf_manager import ta_conf_manager as ta_conf
from splunktalib.conf_manager import conf_manager as conf

import kafka_consts as c

logger = log.Logs().get_logger("setup")


class ConfigApp(admin.MConfigHandler):
    # Single packed argument carrying all settings from the setup UI.
    valid_args = ("all_settings", )

    # Stanza name -> boolean flag. NOTE(review): the exact meaning of the
    # flag (e.g. single-stanza vs user-named stanzas) is not visible in
    # this chunk -- confirm against the handler methods.
    stanza_map = {
        c.global_settings: True,
        c.proxy_settings: False,
        c.credential_settings: True,
        c.forwarder_credential_settings: True,
    }

    # Fields handled through the credential/encryption machinery.
    cred_fields = (c.password, )
    encrypt_fields = (c.password, c.username)
            stclog.Logs().set_level(logging.WARNING)
        elif log_level.upper() == "CRITICAL":
            stclog.Logs().set_level(logging.CRITICAL)
        else:
            # Unrecognized level name -- fall back to INFO.
            stclog.Logs().set_level(logging.INFO)
    elif isinstance(log_level, int):
        if log_level in [logging.DEBUG, logging.INFO, logging.ERROR,
                         logging.WARN, logging.WARNING, logging.CRITICAL]:
            stclog.Logs().set_level(log_level)
        else:
            # Integer but not a known logging constant -- fall back to INFO.
            stclog.Logs().set_level(logging.INFO)
    else:
        # Neither str nor int -- fall back to INFO.
        # NOTE(review): this fragment starts mid-function in SOURCE; the
        # indentation above is reconstructed and should be verified.
        stclog.Logs().set_level(logging.INFO)


# Global logger
logger = stclog.Logs().get_logger("cloud_connect_engine")


def reset_logger(name):
    """
    Reset logger.

    Re-binds the module-level `logger` to a logger with the given name.
    """
    stclog.reset_logger(name)

    global logger
    logger = stclog.Logs().get_logger(name)
sys.path.insert(0, op.join(op.dirname(op.abspath(__file__)), 'splunktalib'))

from splunktalib.conf_manager import conf_manager as conf
from splunktalib import credentials as cred
from splunktalib.common import log
import splunktalib.rest as rest
import splunktalib.common.util as utils

from xml.etree import cElementTree as ET
import csv
import os
import logging
import traceback

import splunk_cluster as sc

utils.remove_http_proxy_env_vars()

logger = log.Logs().get_logger('ta_symantec-ep', level=logging.DEBUG)


def construct_url(base_url):
    """Build the per-bucket query URLs for one endpoint.

    Appends "?azid=<X>" to base_url for each letter A-Z plus the
    catch-all bucket '_1234567890'.
    """
    url_list = []
    # chr(65)..chr(90) == 'A'..'Z' (Python 2 map returns a list here).
    order_list = map(chr, range(65, 91))
    order_list.append('_1234567890')
    for i in order_list:
        url_list.append(base_url + "?azid=" + i)
    return url_list


def extract_xml(http, url_list, retry=3):
    # NOTE(review): SOURCE is truncated inside this function.
    item_list = []
    for url in url_list:
        resp, content = None, None
def set_log_level(log_level):
    """Apply a log level to the global logging facility.

    The argument is first normalized through _get_log_level (which
    accepts level names or numeric logging constants), and the
    resulting level is handed to the shared Logs singleton.
    """
    resolved = _get_log_level(log_level)
    stclog.Logs().set_level(resolved)
        # NOTE(review): this fragment starts mid-function (_get_log_level)
        # in SOURCE; the indentation above this point is reconstructed.
        log_level = log_level.upper()
        # Prefix match lets callers pass e.g. "WARN" for "WARNING".
        for k, v in _level_by_name.items():
            if k.startswith(log_level):
                return v

    if isinstance(log_level, int):
        if log_level in list(_level_by_name.values()):
            return log_level

    # Unknown name/number -- fall back to the supplied default.
    return default_level


def set_log_level(log_level):
    """
    Set log level.
    """
    stclog.Logs().set_level(_get_log_level(log_level))


# Global logger
logger = stclog.Logs().get_logger("ucc_lib")


def reset_logger(name):
    """
    Reset logger.

    Re-binds the module-level `logger` to a logger with the given name.
    """
    stclog.reset_logger(name)

    global logger
    logger = stclog.Logs().get_logger(name)