def setup_logger(name, level=logging.INFO, maxBytes=25000000, backupCount=5,
                 format=SHORT_FORMAT):
    """
    Set up a logging instance.

    @param name: The log file name. We recommend "$action_name$_modalert".
    @param level: The logging level.
    @param maxBytes: The maximum log file size before rollover.
    @param backupCount: The number of log files to retain.
    @param format: The log message format.

    @return logger: Returns an instance of logger
    """
    logfile = make_splunkhome_path(['var', 'log', 'splunk', name + '.log'])

    logger = logging.getLogger(name)
    logger.setLevel(level)
    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = False

    # Prevent re-adding handlers to the logger object, which can cause duplicate log lines.
    handler_exists = any(h.baseFilename == logfile for h in logger.handlers)
    if not handler_exists:
        file_handler = logging.handlers.RotatingFileHandler(
            logfile, maxBytes=maxBytes, backupCount=backupCount)
        formatter = logging.Formatter(format)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    return logger
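# A minimal usage sketch for the helper above, assuming it is the helper
# exposed as splunk_sa_cim.log.setup_logger (as used in the queue handler
# snippet elsewhere in this collection); "myaction" is a hypothetical
# alert action name.
import logging

from splunk_sa_cim.log import setup_logger

logger = setup_logger('myaction_modalert', level=logging.DEBUG)
logger.info('alert action started')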
import logging
import os
import sys

import splunk.clilib.cli_common
from splunk.clilib.bundle_paths import get_slaveapps_base_path
from xmloutput import setupLogger

from .fields import (BooleanField, Field, FieldValidationException,
                     IntervalField)

try:
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

if 'slave' in splunk.clilib.cli_common.getMergedConf('server').get(
        'clustering', {}).get('mode', {}):
    sys.path.append(os.path.join(get_slaveapps_base_path(), "@appname@", "lib"))
else:
    sys.path.append(make_splunkhome_path(["etc", "apps", "@appname@", "lib"]))

# Define logger using the name of the script here, versus in the modular_input class.
# logger = log.setup_logger(name='python_modular_input', level=logging.INFO)
logger = setupLogger(
    logger=None,
    log_format='%(asctime)s %(levelname)s [ModularInput] %(message)s',
    level=logging.INFO,
    log_name="python_modular_input.log",
    logger_name="modinput")


class ModularInputConfig(object):

    def __init__(self, server_host, server_uri, session_key, checkpoint_dir,
                 configuration):
        self.server_host = server_host
""" (C) 2019 Splunk Inc. All rights reserved. REST endpoint handler for the Spacebridge SAML MDM registration process """ import base64 import json import os import sys from functools import partial os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python' from splunk.clilib.bundle_paths import make_splunkhome_path sys.path.append( make_splunkhome_path(['etc', 'apps', 'splunk_app_cloudgateway', 'lib'])) sys.path.append( make_splunkhome_path(['etc', 'apps', 'splunk_app_cloudgateway', 'bin'])) from spacebridgeapp.rest.services.kvstore_service import KVStoreCollectionAccessObject as kvstore from spacebridgeapp.rest.services.splunk_service import update_or_create_sensitive_data, fetch_sensitive_data from cloudgateway.private.encryption.encryption_handler import sign_verify, sign_detached, encrypt_for_send, \ decrypt_for_receive, decrypt_session_token from cloudgateway.private.sodium_client import SodiumClient from cloudgateway.encryption_context import EncryptionContext, generate_keys from cloudgateway.device import EncryptionKeys from cloudgateway.splunk.encryption import SplunkEncryptionContext from spacebridgeapp.request.splunk_auth_header import SplunkAuthHeader from cloudgateway.splunk.auth import SplunkJWTCredentials from spacebridgeapp.util import constants from spacebridgeapp.logging import setup_logging
""" Copyright (C) 2009-2020 Splunk Inc. All Rights Reserved. REST endpoint handler for fetching public and private keys necessary for signing cloud gateway messages """ import sys import json from splunk.persistconn.application import PersistentServerConnectionApplication from splunk.clilib.bundle_paths import make_splunkhome_path sys.path.append(make_splunkhome_path(['etc', 'apps', 'splunk_secure_gateway', 'bin'])) from spacebridgeapp.util import py23 from spacebridgeapp.logging import setup_logging from spacebridgeapp.util import constants from spacebridgeapp.rest.base_endpoint import BaseRestHandler from spacebridgeapp.rest.services import splunk_service from spacebridgeapp.util.constants import MTLS_KEY, MTLS_CERT LOGGER = setup_logging(constants.SPACEBRIDGE_APP_NAME + ".log", "secrets_handler") _ALLOWED_SECRETS = [MTLS_KEY, MTLS_CERT] class SecretsStore(BaseRestHandler, PersistentServerConnectionApplication): def __init__(self, command_line, command_arg): BaseRestHandler.__init__(self)
#!/usr/bin/env python3
# encoding: utf-8
import argparse
import logging
import os
import signal
import sys

from splunk.clilib.bundle_paths import make_splunkhome_path

os.environ["EVENTGEN_LOG_DIR"] = make_splunkhome_path(["var", "log", "splunk"])

# Set path so libraries will load
sys.path.insert(0, make_splunkhome_path(["etc", "apps", "SA-Eventgen", "lib"]))
sys.path.insert(
    0,
    make_splunkhome_path(
        ["etc", "apps", "SA-Eventgen", "lib", "splunk_eventgen", "lib"]),
)

from mod_input import ModularInput  # noqa isort:skip
from mod_input.fields import VerbosityField  # noqa isort:skip
from splunk_eventgen import eventgen_core  # noqa isort:skip
from splunk_eventgen.lib import eventgenconfig  # noqa isort:skip
from xmloutput import XMLOutputManager, setupLogger  # noqa isort:skip

# Initialize logging
logger = setupLogger(
    logger=None,
    log_format="%(asctime)s %(levelname)s [Eventgen] %(message)s",
    level=logging.DEBUG,
    log_name="modinput_eventgen.log",
def writeevents(self, index='summary', host=None, source=None,
                fext='common_action_model'):
    """
    The purpose of this method is to create arbitrary splunk events
    from the list of events in the ModularAction instance.

    Please use addevent() for populating the list of events in
    the ModularAction instance.

    @param index: The index to write the events to.
                  Defaults to "summary".
    @param host: The value of host the events should take on.
                 Defaults to None (auto).
    @param source: The value of source the events should take on.
                   Defaults to None (auto).
    @param fext: The extension of the file to write out.
                 Files are written to $SPLUNK_HOME/var/spool/splunk.
                 File extensions can only contain word characters,
                 dash, and have a 200 char max.
                 "stash_" is automatically prepended to all extensions.
                 Defaults to "common_action_model"
                 ("stash_common_action_model").
                 Only override if you've set up a corresponding
                 props.conf stanza to handle the extension.

    @return bool: Returns True if all events were successfully written.
                  Returns False if any errors were encountered.
    """
    if self.events:
        ## sanitize file extension
        if not fext or not re.match(r'^[\w-]+$', fext):
            self.logger.warn(
                'Requested file extension was ignored due to invalid characters')
            fext = 'common_action_model'
        elif len(fext) > 200:
            self.logger.warn(
                'Requested file extension was ignored due to length')
            fext = 'common_action_model'
        ## header
        header_line = ModularAction.DEFAULT_HEADER % (
            ModularAction.get_header_item('index', index,
                                          ModularAction.DEFAULT_INDEX),
            ModularAction.get_header_item('host', host),
            ModularAction.get_header_item('source', source))
        header_line = header_line.rstrip()
        ## process event chunks
        for chunk in (self.events[x:x + ModularAction.DEFAULT_CHUNK]
                      for x in xrange(0, len(self.events),
                                      ModularAction.DEFAULT_CHUNK)):
            ## initialize output string
            default_breaker = '\n' + ModularAction.DEFAULT_BREAKER
            fout = header_line + default_breaker + default_breaker.join(chunk)
            ## write output string
            ## ensure fp is defined for the error path below
            fp = None
            try:
                fn = '%s_%s.stash_%s' % (mktimegm(time.gmtime()),
                                         random.randint(0, 100000), fext)
                fp = make_splunkhome_path(['var', 'spool', 'splunk', fn])
                ## obtain fh
                with open(fp, 'w') as fh:
                    fh.write(fout)
            except Exception:
                signature = 'Error obtaining file handle during makeevents'
                self.message(signature, level=logging.ERROR, file_path=fp)
                self.logger.exception(signature + ' file_path=%s' % fp)
                return False
        self.message('Successfully created splunk events',
                     event_count=len(self.events))
        return True
    return False
import logging
import logging.handlers
import math
import os
import random
import re
import StringIO
import struct
import sys
import time

import splunk.util as util
from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(['etc', 'apps', 'SA-Utils', 'lib']))

from SolnCommon.kvstore import KvStoreHandler


class Duration(object):
    DURATION_MAP = [("y", 31556926),
                    ("yr", 31556926),
                    ("yrs", 31556926),
                    ("year", 31556926),
                    ("years", 31556926),
                    ("mon", 2629744),
                    ("M", 2629744),
                    ("month", 2629744),
                    ("months", 2629744),
                    ("q", 3 * 2629744),
                    ("qtr", 3 * 2629744),
                    ("qtrs", 3 * 2629744),
                    ("quarter", 3 * 2629744),
                    ("quarters", 3 * 2629744),
                    ("d", 86400),
                    ("day", 86400),
                    ("days", 86400),
                    ("w", 7 * 86400),
                    ("week", 7 * 86400),
                    ("weeks", 7 * 86400),
                    ("h", 3600),
                    ("hr", 3600),
                    ("hrs", 3600),
                    ("hour", 3600),
                    ("hours", 3600),
                    ("m", 60),
                    ("min", 60),
                    ("minute", 60),
                    ("minutes", 60),
                    ("s", 1),
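# A hypothetical helper (not part of the original class) showing how
# DURATION_MAP can be used: split a duration string such as "5d" or "2hrs"
# into a count and a unit, then multiply by that unit's seconds.
import re


def duration_to_seconds(duration):
    match = re.match(r'^(\d+)\s*([A-Za-z]+)$', duration)
    if not match:
        raise ValueError('invalid duration: %s' % duration)
    count, unit = int(match.group(1)), match.group(2)
    for name, seconds in Duration.DURATION_MAP:
        if unit == name:
            return count * seconds
    raise ValueError('unknown duration unit: %s' % unit)

# duration_to_seconds('5d') == 5 * 86400 == 432000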
    SpacebridgeCompanionAppError, SpacebridgeUnsupportedMessageTypeError
from spacebridgeapp.request import connectivity_test_request_processor
from spacebridgeapp.request.request_processor import parse_session_token, parse_run_as_credentials
from spacebridgeapp.subscriptions import subscription_processor
from spacebridgeapp.request.request_list import REQUESTS, SUBSCRIPTION_REQUESTS, ENCRYPTION_CONTEXT
from spacebridgeapp.request.request_type import RequestType
from spacebridgeapp.metrics.websocket_metrics import send_websocket_metrics_to_telemetry
from spacebridgeapp.logging import setup_logging
from splapp_protocol import envelope_pb2
from splapp_protocol import common_pb2
from spacebridge_protocol import websocket_pb2
from spacebridge_protocol import sb_common_pb2
from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(['etc', 'apps', SPACEBRIDGE_APP_NAME, 'lib']))

LOGGER = setup_logging(SPACEBRIDGE_APP_NAME + "_message_processor.log",
                       "message_processor")


async def post_process_message(request_context,
                               input_server_application_message,
                               async_client_factory,
                               guid_generator):
    # Post processing on serverSubscriptionResponse
    if input_server_application_message.HasField(SERVER_SUBSCRIPTION_RESPONSE):
        try:
            # Create Server Application Message to return for post_processing
            server_application_message = envelope_pb2.ServerApplicationMessage()

            # Populate a server_subscription_update which
import sys

from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(
    make_splunkhome_path(['etc', 'apps', 'splunk_app_infrastructure', 'bin']))  # noqa
import em_path_inject  # noqa

import http.client

# common packages
from em_rest_aws_impl import EMAwsInterfaceImpl
from em_migration.migration_before_handle_hook import MigrationStatusCheckHook
from logging_utils import log
from rest_handler import rest_interface_splunkd
from rest_handler.rest_interface_splunkd import route
from rest_handler.session import session
from rest_handler.hooks import before_handle_hooks

logger = log.getLogger()


@before_handle_hooks([MigrationStatusCheckHook])
class EMAwsInterface(rest_interface_splunkd.BaseRestInterfaceSplunkd):

    @route('/check_env', methods=['GET'])
    def handle_check_env(self, request):
        interface_impl = EMAwsInterfaceImpl(session['authtoken'])
        if request.method == 'GET':
            logger.info('User triggered AWS check env')
            response = interface_impl.handle_check_env(request)
            return http.client.OK, response
try:
    import http.client as http_client
except ImportError:
    import httplib as http_client

import json
import operator
import splunk.rest as rest
import sys

from splunk import RESTException
from splunk.clilib.bundle_paths import make_splunkhome_path
from splunk.persistconn.application import PersistentServerConnectionApplication

sys.path.append(make_splunkhome_path(['etc', 'apps', 'Splunk_SA_CIM', 'lib']))

from splunk_sa_cim.log import setup_logger
from splunk_sa_cim.modaction_queue import ModularActionQueueBR, ModularActionQueueISE, ModularActionQueueUnauth, ModularActionQutils

logger = setup_logger('modaction_queue_handler')


class ModularActionQueueHandler(PersistentServerConnectionApplication):
    '''REST handler for generating modular action queue api keys.'''

    DEFAULT_MAX_ITEMS = 10

    def __init__(self, command_line, command_arg):
        super(ModularActionQueueHandler, self).__init__()
        try:
            self.params = json.loads(command_arg)
        except Exception as e:
            logger.warn(e)
import cherrypy
import json
import splunk.appserver.mrsparkle.controllers as controllers
from splunk.appserver.mrsparkle.lib import jsonresponse
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route
from splunk.rest import simpleRequest
import sys

from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(["etc", "apps", "SA-ThreatIntelligence", "bin"]))
from custom_search_builder.base import CustomSearchBuilderBase
from custom_search_builder.exceptions import *
from custom_search_builder.make_correlation_search import makeCorrelationSearch
from custom_search_builder.make_lookup_generating_search import makeLookupGeneratingSearch

sys.path.append(make_splunkhome_path(["etc", "apps", "Splunk_SA_CIM", "lib"]))
from cim_models import DataModels

import logging

logger = logging.getLogger('splunk.appserver.SA-ThreatIntelligence.controllers.CustomSearchBuilder')


class CustomSearchBuilder(controllers.BaseController):

    @route('/:get_data_models=get_data_models')
    @expose_page(must_login=True, methods=['GET'])
    def getDataModelsAndObjects(self, **kwargs):
        # Get the session key
        sessionKey = cherrypy.session.get('sessionKey')
    where = a.find('=')
    file = a[where + 1:len(a)]

dest_ports = []
exceptions = {}
first = True
srcFields = []
destFields = []
results = ''

## Retrieve results and settings
results, dummyresults, settings = splunk.Intersplunk.getOrganizedResults()

## Determine path to lookup
baseStorage = make_splunkhome_path(["etc", "apps", app, 'lookups', file])

try:
    dest_ports = csv.reader(open(baseStorage, 'rU'))
except Exception:
    results = splunk.Intersplunk.generateErrorResults(
        'Error: directionalize exceptions file: ' + baseStorage + ' not found')

for dest_port in dest_ports:
    exceptions[dest_port[0]] = "yes"

for x in range(0, len(results)):
    tempFields = {}
    if first:
import argparse

import splunk.auth as auth

try:
    # In newer versions of splunk we should be importing from
    # this location
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    # Older versions may still use this import
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

app_info = '{"app": "Splunk_TA_ForIndexers", "label": "Splunk App For Indexers", "version": "1.0.0", "build": "0"}'
include_indexes = True
include_properties = True
imported_apps_only = True
namespace = 'SplunkEnterpriseSecuritySuite'
spl_location = make_splunkhome_path(['etc', 'apps', 'SA-Utils', 'local', 'data', 'appmaker'])


def create_parser():
    '''
    Wrapper for the argument parser
    '''
    parser = argparse.ArgumentParser(description="Script to generate Enterprise Security TA bundle")
    parser.add_argument("--username", default="admin", type=str, help="Splunk username")
    parser.add_argument("--password", default=None, type=str, help="Splunk password")
    return parser


def make_ta_for_indexers(username, password):
    '''
    Splunk_TA_ForIndexers spl generation for ES 4.2.0 and up
    There are three versions of ES we're now supporting (changes to makeIndexTimeProperties
    nametermsfilename = argvals.get("name-terms", nametermsfilename)
    dictionaryfilename = argvals.get("dictionary", dictionaryfilename)
    timestampconfigfilename = argvals.get("time-config", timestampconfigfilename)

    # locate the files
    app = argvals.get("namespace")

    # first find the app, if it exists
    app_dir = None
    if app:
        if "/" in app or "\\" in app or ".." in app:
            msg = "Error: namespace name may not include the '/' '\\' or '..' sequences"
            err_results.append(msg)
        else:
            app_dir = make_splunkhome_path(['etc', 'apps', app])
            if not os.path.isdir(app_dir):
                app_dir = make_splunkhome_path(['etc', 'slave-apps', app])
                if not os.path.isdir(app_dir):
                    msg = "Error: could not find specified app '%s' on disk" % app
                    err_results.append(msg)
                    app_dir = None

    # now find each file in either the app or the global dir
    publicterms_path = locate_anonymize_file(publictermsfilename, app_dir, err_results)
    privateterms_path = locate_anonymize_file(privatetermsfilename, app_dir, err_results)
    nameterms_path = locate_anonymize_file(nametermsfilename, app_dir, err_results)
    dictionary_path = locate_anonymize_file(dictionaryfilename, app_dir,
# CORE SPLUNK IMPORTS (not needed)
# import splunk
# import splunk.search as splunkSearch
# from splunk.rest import simpleRequest
# import splunk.version as ver
# import splunk.clilib.cli_common
# import splunk.auth, splunk.search
# import splunk.Intersplunk as si

import csv
import sys

try:
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

sys.path.append(make_splunkhome_path(["etc", "apps", "Splunk_SA_CIM", "lib"]))
sys.path.append(make_splunkhome_path(["etc", "apps", "SA-Utils", "lib"]))
sys.path.append(make_splunkhome_path(["etc", "apps", "Splunk_TA_paloalto", "bin", "lib"]))
sys.path.append(make_splunkhome_path(["etc", "apps", "Splunk_TA_paloalto", "bin", "lib", "pan-python", "lib"]))
sys.path.append(make_splunkhome_path(["etc", "apps", "Splunk_TA_paloalto", "bin", "lib", "pandevice"]))

from cim_actions import ModularAction
import common
from pandevice.firewall import Firewall

# set the maximum allowable CSV field size
#
# The default of the csv module is 128KB; upping to 10MB. See SPL-12117 for
# the background on issues surrounding field sizes.
# (this method is new in python 2.5)
csv.field_size_limit(10485760)
def writeevents(self, index='summary', host=None, source=None,
                fext='common_action_model'):
    """
    The purpose of this method is to create arbitrary splunk events
    from the list of events in the ModularAction instance.

    Please use addevent() for populating the list of events in
    the ModularAction instance.

    @param index: The index to write the events to.
                  Defaults to "summary".
    @param host: The value of host the events should take on.
                 Defaults to None (auto).
    @param source: The value of source the events should take on.
                   Defaults to None (auto).
    @param fext: The extension of the file to write out.
                 Files are written to $SPLUNK_HOME/var/spool/splunk.
                 File extensions can only contain word characters,
                 dash, and have a 200 char max.
                 "stash_" is automatically prepended to all extensions.
                 Defaults to "common_action_model"
                 ("stash_common_action_model").
                 Only override if you've set up a corresponding
                 props.conf stanza to handle the extension.

    @return bool: Returns True if all events were successfully written.
                  Returns False if any errors were encountered.
    """
    ## internal makeevents method for normalizing strings
    ## that will be used in the various headers we write out
    def get_string(input, default):
        try:
            return input.replace('"', '_')
        except AttributeError:
            return default

    if self.events:
        ## sanitize file extension
        if not fext or not re.match(r'^[\w-]+$', fext):
            self.logger.warn('Requested file extension was ignored due to invalid characters')
            fext = 'common_action_model'
        elif len(fext) > 200:
            self.logger.warn('Requested file extension was ignored due to length')
            fext = 'common_action_model'
        ## header
        header_line = ModularAction.DEFAULT_HEADER % (
            get_string(index, ModularAction.DEFAULT_INDEX),
            get_string(host, ''),
            get_string(source, ''))
        ## process event chunks
        for chunk in (self.events[x:x + ModularAction.DEFAULT_CHUNK]
                      for x in xrange(0, len(self.events), ModularAction.DEFAULT_CHUNK)):
            ## initialize output string
            default_breaker = '\n' + ModularAction.DEFAULT_BREAKER
            fout = header_line + default_breaker + default_breaker.join(chunk)
            ## write output string
            ## ensure fp is defined for the error path below
            fp = None
            try:
                fn = '%s_%s.stash_%s' % (mktimegm(time.gmtime()),
                                         random.randint(0, 100000), fext)
                fp = make_splunkhome_path(['var', 'spool', 'splunk', fn])
                ## obtain fh
                with open(fp, 'w') as fh:
                    fh.write(fout)
            except Exception:
                signature = 'Error obtaining file handle during makeevents'
                self.message(signature, level=logging.ERROR, file_path=fp)
                self.logger.exception(signature + ' file_path=%s' % fp)
                return False
        self.message('Successfully created splunk events',
                     event_count=len(self.events))
        return True
    return False
def writeevents(
    self, index="summary", host=None, source=None, fext="common_action_model"
):
    """The purpose of this method is to create arbitrary splunk events
    from the list of events in the ModularAction instance.

    Please use addevent() for populating the list of events in
    the ModularAction instance.

    @param index: The index to write the events to.
                  Defaults to "summary".
    @param host: The value of host the events should take on.
                 Defaults to None (auto).
    @param source: The value of source the events should take on.
                   Defaults to None (auto).
    @param fext: The extension of the file to write out.
                 Files are written to $SPLUNK_HOME/var/spool/splunk.
                 File extensions can only contain word characters,
                 dash, and have a 200 char max.
                 "stash_" is automatically prepended to all extensions.
                 Defaults to "common_action_model"
                 ("stash_common_action_model").
                 Only override if you've set up a corresponding
                 props.conf stanza to handle the extension.

    @return bool: Returns True if all events were successfully written.
                  Returns False if any errors were encountered.
    """

    ## internal makeevents method for normalizing strings
    ## that will be used in the various headers we write out
    def get_string(input, default):
        try:
            return input.replace('"', "_")
        except AttributeError:
            return default

    if self.events:
        ## sanitize file extension
        if not fext or not re.match(r"^[\w-]+$", fext):
            self.logger.warn(
                "Requested file extension was ignored due to invalid characters"
            )
            fext = "common_action_model"
        elif len(fext) > 200:
            self.logger.warn("Requested file extension was ignored due to length")
            fext = "common_action_model"
        ## header
        header_line = ModularAction.DEFAULT_HEADER % (
            get_string(index, ModularAction.DEFAULT_INDEX),
            get_string(host, ""),
            get_string(source, ""),
        )
        ## process event chunks
        for chunk in (
            self.events[x : x + ModularAction.DEFAULT_CHUNK]
            for x in range(0, len(self.events), ModularAction.DEFAULT_CHUNK)
        ):
            ## initialize output string
            default_breaker = "\n" + ModularAction.DEFAULT_BREAKER
            fout = header_line + default_breaker + default_breaker.join(chunk)
            ## write output string
            ## ensure fp is defined for the error path below
            fp = None
            try:
                fn = "%s_%s.stash_%s" % (
                    mktimegm(time.gmtime()),
                    random.randint(0, 100000),
                    fext,
                )
                fp = make_splunkhome_path(["var", "spool", "splunk", fn])
                ## obtain fh
                with open(fp, "w") as fh:
                    fh.write(fout)
            except Exception:
                signature = "Error obtaining file handle during makeevents"
                self.message(signature, level=logging.ERROR, file_path=fp)
                self.logger.exception(signature + " file_path=%s" % fp)
                return False
        self.message(
            "Successfully created splunk events", event_count=len(self.events)
        )
        return True
    return False
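# A hedged usage sketch for writeevents(): a modular alert typically
# subclasses ModularAction, queues events with addevent(), and flushes them
# with writeevents(). The payload comes from sys.stdin; the sourcetype and
# raw event below are illustrative only, and addevent()'s exact signature
# should be checked against the cim_actions version in use.
modaction = ModularAction(sys.stdin.read(), logger, 'my_alert')
modaction.addevent('action="blocked" src="10.0.0.1"', sourcetype='my:alert')
if not modaction.writeevents(index='summary', source='my_alert'):
    logger.error('failed to write events to the spool directory')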
    def __init__(self):
        self.DEFAULT_INITIAL_DELAY = 300
        self.DEFAULT_RETRIES = 3
        self.DEFAULT_RETRY_INTERVAL = 60
        self.DEFAULT_TIMEOUT_INTERVAL = 30
        self.DEFAULT_SKIP_HEADER_LINES = 0
        self.DEFAULT_THREAD_POOL_SIZE = 5
        self.DEFAULT_THREAD_SLEEP_INTERVAL = 300
        self.DEFAULT_MERGE_THREAD_SLEEP_INTERVAL = 15

        self.HANDLER_HTTP = 'http'
        self.HANDLER_HTTPS = 'https'
        self.HANDLER_LOOKUP = 'lookup'
        self.HANDLER_TAXII = 'taxii'

        # Dictionary of supported protocol handlers.
        self.PROTOCOL_HANDLERS = {
            self.HANDLER_HTTP: HttpProtocolHandler,
            self.HANDLER_HTTPS: HttpProtocolHandler,
            self.HANDLER_LOOKUP: NoopProtocolHandler,
            self.HANDLER_TAXII: TaxiiHandler
        }

        # Regex for extracting key=value strings
        self.KV_REGEX = re.compile(r'(\w+)=([\w:$]+|"[^"]+")')

        # Regex for extracting interpolated arguments.
        self.ARG_REGEX = re.compile(r'\$([A-Za-z0-9_]+):([A-Za-z0-9_]+)\$')

        # Default target directory
        self.THREAT_INTEL_TARGET_PATH = make_splunkhome_path([
            'etc', 'apps', 'SA-ThreatIntelligence', 'local', 'data',
            'threat_intel'
        ])

        # Default exclusions - these are the types of threatlist that don't get
        # written to self.THREAT_INTEL_TARGET_PATH
        self.THREAT_INTEL_EXCLUSIONS = ['alexa', 'asn', 'mozilla_psl', 'tld']

        self.DEPRECATED_STANZAS = [
            'maxmind_geoip_asn_ipv4', 'maxmind_geoip_asn_ipv6'
        ]

        scheme_args = {
            'title': "Threat Intelligence Downloads",
            'description': "Downloads threat lists or other threat intelligence feeds from remote hosts.",
            'use_external_validation': "true",
            'streaming_mode': "xml",
            'use_single_instance': "false"
        }

        args = [
            # General options
            Field("type", "Threatlist Type",
                  """Type of threat list, such as "malware". Must be "taxii" for TAXII feeds.""",
                  required_on_create=True, required_on_edit=True),
            Field("description", "Description",
                  """Description of the threat list.""",
                  required_on_create=True, required_on_edit=True),
            Field("max_age", "Maximum age",
                  "Maximum age for threat content (provided for use by consumers of threat content)",
                  required_on_create=False, required_on_edit=False),
            Field("target", "Target", """Target lookup table.""",
                  required_on_create=False, required_on_edit=False),
            Field("url", "URL", """URL or location of the threatlist.""",
                  required_on_create=True, required_on_edit=True),
            RangeField("weight", "Weight",
                       """Weight for IPs that appear on this threatlist. A higher weight increases an IP's risk score.""",
                       low=1, high=100,
                       required_on_create=True, required_on_edit=True),

            # Download options
            Field("post_args", "POST arguments",
                  """POST arguments to send to the remote URL.""",
                  required_on_create=False, required_on_edit=False),
            IntegerField("retries", "Retries",
                         "The number of times to retry a failed download. [Defaults to {0}]".format(self.DEFAULT_RETRIES),
                         required_on_create=True, required_on_edit=True),
            IntegerField("retry_interval", "Retry interval",
                         "Interval between attempts to download this threat list, in seconds. [Defaults to {0}]".format(self.DEFAULT_RETRY_INTERVAL),
                         required_on_create=True, required_on_edit=True),
            Field("site_user", "Remote site user",
                  "The user name for authentication to the remote site, if required. Must correspond to a Splunk stored credential.",
                  required_on_create=False, required_on_edit=False),
            IntegerField("timeout", "Timeout interval",
                         "Time before regarding a download attempt as failed, in seconds. [Defaults to {0}]".format(self.DEFAULT_TIMEOUT_INTERVAL),
                         required_on_create=True, required_on_edit=True),

            # Proxy options
            RangeField("proxy_port", "Proxy port",
                       "The proxy server port, if required.",
                       low=0, high=65535,
                       required_on_create=False, required_on_edit=False),
            Field("proxy_server", "Proxy server",
                  "The proxy server, if required. Only used by HTTP(S) protocol.",
                  required_on_create=False, required_on_edit=False),
            Field("proxy_user", "Proxy user",
                  "The proxy user name, if required. Must correspond to a Splunk stored credential. Only used by HTTP(S) protocol.",
                  required_on_create=False, required_on_edit=False),

            # Parser options
            Field("delim_regex", "Delimiting regex",
                  "Regular expression used to delimit the input.",
                  required_on_create=False, required_on_edit=False),
            Field("extract_regex", "Extracting regex",
                  "Regular expression used to extract fields from the input.",
                  required_on_create=False, required_on_edit=False),
            Field("fields", "Fields",
                  "The list of fields to extract from the threat list.",
                  required_on_create=False, required_on_edit=False),
            Field("ignore_regex", "Ignoring regex",
                  "Regular expression for lines to be ignored in the threat list.",
                  required_on_create=False, required_on_edit=False),
            Field("skip_header_lines", "Skip header lines",
                  "Number of header lines to skip, if any. [Defaults to {0}]".format(self.DEFAULT_SKIP_HEADER_LINES),
                  required_on_create=False, required_on_edit=False),

            # General processing options - should only be set in default stanza.
            IntegerField("initial_delay", "Initial delay",
                         """Initial delay in seconds before the modular input begins executing, IF not being executed on a cron schedule. Used to alleviate startup load. [Defaults to {0}]""".format(self.DEFAULT_INITIAL_DELAY),
                         required_on_create=False, required_on_edit=False),
            Field("master_host", "Master host",
                  "The master host for this download.",
                  required_on_create=False, required_on_edit=False),
        ]

        self._app = 'SA-ThreatIntelligence'
        self._owner = 'nobody'
        self._name = 'Threatlist'

        self._logger = setup_logger(name='threatlist', level=logging.INFO)

        super(ThreatlistModularInput, self).__init__(scheme_args, args)
from splunktaucclib.splunk_aoblib.rest_helper import TARestHelper
import logging
from splunktaucclib.logging_helper import get_logger
from splunktaucclib.cim_actions import ModularAction
import requests
from builtins import str
import csv
import gzip
import sys

try:
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

sys.path.insert(0, make_splunkhome_path(["etc", "apps", "Splunk_SA_CIM", "lib"]))


class ModularAlertBase(ModularAction):
    def __init__(self, ta_name, alert_name):
        self._alert_name = alert_name
        # self._logger_name = "modalert_" + alert_name
        self._logger_name = alert_name + "_modalert"
        self._logger = get_logger(self._logger_name)
        super(ModularAlertBase, self).__init__(
            sys.stdin.read(), self._logger, alert_name
        )
        self.setup_util_module = None
        self.setup_util = None
        self.result_handle = None
        self.ta_name = ta_name
""" Copyright (C) 2009-2020 Splunk Inc. All Rights Reserved. REST endpoint handler for getting the devices in the kvstore belonging to a specific user """ import base64 import json import sys from splunk.clilib.bundle_paths import make_splunkhome_path from splunk.persistconn.application import PersistentServerConnectionApplication sys.path.append( make_splunkhome_path(['etc', 'apps', 'splunk_secure_gateway', 'bin'])) sys.path.append( make_splunkhome_path(['etc', 'apps', 'splunk_secure_gateway', 'lib'])) from http import HTTPStatus from spacebridgeapp.util import py23 from spacebridgeapp.logging import setup_logging from spacebridgeapp.util import constants from spacebridgeapp.rest.base_endpoint import BaseRestHandler from spacebridgeapp.rest.services.splunk_service import get_devices_for_user from spacebridgeapp.exceptions.key_not_found_exception import KeyNotFoundError LOGGER = setup_logging(constants.SPACEBRIDGE_APP_NAME + ".log", "rest_user_devices") class DevicesForUser(BaseRestHandler, PersistentServerConnectionApplication):
def setup_logging(logfile_name=None, logger_name=None, logger=None,
                  level=logging.INFO, is_console_header=False,
                  log_format=LOG_DEFAULT_FMT, is_propagate=False):
    '''
    Set up logging.

    @param logfile_name: log file name
    @param logger_name: logger name (if logger specified then we ignore this argument)
    @param logger: logger object
    @param level: logging level
    @param is_console_header: set to true if console logging is required
    @param log_format: log message format
    @param is_propagate: set to true if you want to propagate log to higher level
    @return: logger
    '''
    if (logfile_name is None or logger_name is None) and logger is None:
        raise ValueError(
            "logfile_name or logger_name is not specified and logger object is not provided.")

    if logger is None:
        # The logging module is a singleton: if the logger is already defined,
        # this returns the existing one with its old handlers.
        logger = logging.getLogger(logger_name)

    logger = logger.logger if isinstance(logger, CloudgatewayLogger) else logger

    # Save the handlers before overwriting the logger
    loghandlers = logger.handlers

    # If handlers are already defined then do not create new ones; this avoids
    # opening the file again, which is an issue on Windows (see ITOA-2439).
    # Check whether we need to create new handler(s).
    hasFileHandler = False
    hasConsoleHandler = False
    handlerFormat = None
    for handler in loghandlers:
        if isinstance(handler, logging.handlers.RotatingFileHandler):
            handlerFormat = handlerFormat if handlerFormat else handler.formatter
            hasFileHandler = True
        elif isinstance(handler, logging.StreamHandler):
            handlerFormat = handlerFormat if handlerFormat else handler.formatter
            hasConsoleHandler = True

    # If logger_name is None, create a child logger with different properties
    # from the parent. If the given logger_name is not equal to the existing
    # logger's name, also create a child logger. The dot (.) between the two
    # names makes the new logger a child of the existing one, so new handlers
    # added to the child will not impact the parent.
    if logger_name is None or logger.name != logger_name:
        logger = logging.getLogger(
            "%s.%s" % (logger.name, logger_name if logger_name else "sub"))

    # Prevent the log messages from being duplicated in the python.log file
    logger.propagate = is_propagate
    logger.setLevel(level)

    if not hasFileHandler:
        try:
            lockdir = make_splunkhome_path(['var', CLOUDGATEWAY, 'lock'])
            if not os.path.exists(os.path.dirname(lockdir)):
                os.mkdir(make_splunkhome_path(['var', CLOUDGATEWAY]))
                os.mkdir(make_splunkhome_path(['var', CLOUDGATEWAY, 'lock']))
            elif not os.path.exists(lockdir):
                os.mkdir(lockdir)
        except OSError as ose:
            # Swallow all "File exists" errors - another thread/process beat us to the punch
            if ose.errno != 17:
                raise

        logfile = logfile_name
        if os.path.basename(logfile_name) == logfile_name:
            logfile = make_splunkhome_path(
                ['var', 'log', 'splunk', logfile_name])

        # Note that there are still some issues with Windows here
        file_handler = logging.handlers.RotatingFileHandler(
            logfile, maxBytes=2500000, backupCount=5)
        file_handler.setFormatter(
            handlerFormat if handlerFormat else logging.Formatter(log_format))
        logger.addHandler(file_handler)

    if is_console_header and not hasConsoleHandler:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(
            handlerFormat if handlerFormat else logging.Formatter(log_format))
        logger.addHandler(console_handler)

    # Read logging level information from log.cfg; if a level is specified for
    # this logger there, it overwrites the level set above.
    LOGGING_DEFAULT_CONFIG_FILE = make_splunkhome_path(['etc', 'log.cfg'])
    LOGGING_LOCAL_CONFIG_FILE = make_splunkhome_path(['etc', 'log-local.cfg'])
    LOGGING_STANZA_NAME = 'python'
    setupSplunkLogger(logger, LOGGING_DEFAULT_CONFIG_FILE,
                      LOGGING_LOCAL_CONFIG_FILE, LOGGING_STANZA_NAME,
                      verbose=False)

    return logger
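# Typical call pattern for setup_logging(), matching the handler snippets in
# this collection; 'my_handler' is a hypothetical logger name.
LOGGER = setup_logging(logfile_name=constants.SPACEBRIDGE_APP_NAME + '.log',
                       logger_name='my_handler')
LOGGER.info('handler initialized')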
import sys

from app_greynoise_declare import ta_name as APP_NAME, ta_lib_name as APP_LIB_NAME

from functools import partial

try:
    from itertools import izip
except ImportError:
    # For Python 3.x
    izip = zip

import threading
import time
import traceback

from splunk.clilib.bundle_paths import make_splunkhome_path

if sys.version_info[0] == 2:
    # Import the package from the python2 directory when using Python 2
    sys.path.append(
        make_splunkhome_path(
            ['etc', 'apps', APP_NAME, 'bin', APP_LIB_NAME, 'python2_lib']))
    from python2_lib.concurrent.futures import ThreadPoolExecutor
else:
    # Import the package from the python3 directory when using Python 3
    sys.path.append(
        make_splunkhome_path(
            ['etc', 'apps', APP_NAME, 'bin', APP_LIB_NAME, 'python3_lib']))
    from python3_lib.concurrent.futures import ThreadPoolExecutor

import six
from requests.exceptions import ConnectionError, RequestException
from greynoise.exceptions import RateLimitError, RequestFailure
from utility import get_dict, nested_dict_iter
def getLogFilePath(self):
    return make_splunkhome_path(
        ['var', 'log', 'splunk', self.logfile_name])
def generate(self):
    event_count = 0
    csv_file_names = []

    if self.populate_from_cim:
        csv_file_names.append(
            make_splunkhome_path([
                'etc', 'apps', 'Splunk_SA_CIM', 'lookups',
                'cim_corporate_email_domains.csv'
            ]))
        csv_file_names.append(
            make_splunkhome_path([
                'etc', 'apps', 'Splunk_SA_CIM', 'lookups',
                'cim_corporate_web_domains.csv'
            ]))

    # Make sure we just get the base file name, in case there was some
    # directory traversal going on.
    if self.domainlist_file_name:
        sanitized_file_name = os.path.basename(self.domainlist_file_name)
        lookup_path = make_splunkhome_path([
            'etc', 'apps', 'DA-ESS-ContentUpdate', 'lookups',
            sanitized_file_name
        ])

        # Make sure there really isn't any directory traversal going on.
        valid_path = True
        if "../" in lookup_path:
            valid_path = False

        # Make sure the path that is created by adding the file name to the
        # path is the same as the absolute path
        if lookup_path != os.path.abspath(lookup_path):
            valid_path = False

        if valid_path:
            csv_file_names.append(lookup_path)

    domains_to_twist = []
    for csv_file_name in csv_file_names:
        if os.path.exists(csv_file_name):
            # Python 3's csv module needs newline=''; Python 2's open() does
            # not accept that keyword argument
            if sys.version_info >= (3, 0):
                csv_file = open(csv_file_name, "r", newline='')
            else:
                csv_file = open(csv_file_name, "r")
            for input_domain in csv.DictReader(csv_file):
                if input_domain['domain'] not in domains_to_twist:
                    domains_to_twist.append(input_domain['domain'])

    # if a single domain is passed, just calculate that one
    if self.domain != '':
        domains_to_twist = [self.domain]

    for domain_to_twist in domains_to_twist:
        domain_to_twist = domain_to_twist.lstrip('*')
        dfuzz = DomainFuzz(domain_to_twist)
        dfuzz.generate()
        domains = dfuzz.domains

        for domain in domains:
            # We don't want to keep the original domain
            if domain['domain-name'] in domain_to_twist:
                continue
            event_count += 1
            yield {
                '_time': time.time(),
                'event_no': event_count,
                '_raw': domain['domain-name'],
                'domain': '*' + domain['domain-name'] + '*',
                'original_domain': domain_to_twist
            }
from __future__ import print_function

import csv
import gzip
import sys

try:
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

sys.path.insert(0, make_splunkhome_path(["etc", "apps", "Splunk_SA_CIM", "lib"]))

import requests
from cim_actions import ModularAction
from logging_helper import get_logger
import logging
from splunk_aoblib.rest_helper import TARestHelper
from splunk_aoblib.setup_util import Setup_Util


class ModularAlertBase(ModularAction):
    def __init__(self, ta_name, alert_name):
        self._alert_name = alert_name
        # self._logger_name = "modalert_" + alert_name
        self._logger_name = alert_name + "_modalert"
        self._logger = get_logger(self._logger_name)
        super(ModularAlertBase, self).__init__(sys.stdin.read(),
                                               self._logger, alert_name)
        self.setup_util_module = None
        self.setup_util = None
            'status': 200
        })

    ** NOTE **
    Do not pass a deferred to defer.returnValue. This is not supported by
    Twisted, but more annoyingly, it will cause an infinite loop that will
    prevent your request from succeeding AND prevent you from manually
    stopping Splunk with the "splunk stop" command line command. You will
    have to manually kill the process with something like:

        lsof -i tcp:8089
        kill -9 <process IDs from the previous command>
"""

import sys
import threading

from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(['etc', 'apps', 'splunk_app_cloudgateway', 'bin']))

from spacebridgeapp.util import py23
from spacebridgeapp import logging
from spacebridgeapp.rest import base_endpoint
from spacebridgeapp.rest.clients.async_client_factory import AsyncClientFactory
from spacebridgeapp.util import constants
from splunk import rest
from splunk.persistconn import application
from twisted.internet import reactor
from twisted.internet import threads

LOGGER = logging.setup_logging(constants.SPACEBRIDGE_APP_NAME + '.log', 'async_bridge_v2')
import json
import logging
import logging.handlers
import splunk.appserver.mrsparkle.controllers as controllers
import splunk.entity as en
import splunk.clilib.bundle_paths as bp
import sys
import urllib

from splunk import AuthenticationFailed, AuthorizationFailed, ResourceNotFound, BadRequest
from splunk.appserver.mrsparkle.lib import jsonresponse
from splunk.appserver.mrsparkle.lib.decorators import expose_page
from splunk.appserver.mrsparkle.lib.routes import route
from splunk.rest import simpleRequest
from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(['etc', 'apps', 'SA-Utils', 'lib']))
from SolnCommon.log import setup_logger, SHORT_FORMAT

# Import the correlation search helper class
sys.path.append(
    make_splunkhome_path(["etc", "apps", "SA-ThreatIntelligence", "bin"]))
from correlation_search import CorrelationSearch
from custom_search_builder.base import CustomSearchBuilderBase
from shortcuts import Severity

logger = setup_logger('correlation_search_controller',
                      level=logging.DEBUG,
                      format=SHORT_FORMAT)


class CorrelationSearchBuilder(controllers.BaseController):
#!/usr/bin/env python
# encoding: utf-8
import sys
import logging
import argparse
import signal

# Set path so libraries will load
from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.insert(0, make_splunkhome_path(['etc', 'apps', 'SA-Eventgen', 'lib']))
sys.path.insert(
    0,
    make_splunkhome_path(
        ['etc', 'apps', 'SA-Eventgen', 'lib', 'splunk_eventgen', 'lib']))

from modinput.fields import BooleanField, Field
from xmloutput import setupLogger, XMLOutputManager
from modinput import ModularInput
from splunk_eventgen import eventgen_core
from splunk_eventgen.lib import eventgenconfig

# Initialize logging
logger = setupLogger(
    logger=None,
    log_format='%(asctime)s %(levelname)s [Eventgen] %(message)s',
    level=logging.DEBUG,
    log_name="modinput_eventgen.log",
    logger_name="eventgen_app")


class SimpleNamespace(dict):
# Importing the cim_actions.py library
# A. Import make_splunkhome_path
# B. Append library path to sys.path
# C. Import ModularAction from cim_actions
try:
    if version >= 6.4:
        from splunk.clilib.bundle_paths import make_splunkhome_path
    else:
        from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path
except ImportError as e:
    raise ImportError("Import splunk sub libraries failed\n")

sys.path.append(
    make_splunkhome_path(["etc", "apps", "TA-Demisto", "bin", "lib"]))

try:
    from cim_actions import ModularAction
except ImportError:
    sys.exit(3)

logger = DemistoConfig.get_logger("DEMISTOALERT")
modular_action_logger = ModularAction.setup_logger('demisto_modalert')


class DemistoAction(ModularAction):

    def create_demisto_incident(self, result, authkey, verify,
import requests
import sys

from json import dumps

from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(['etc', 'apps', 'opsgenie_itsi', 'lib']))
sys.path.append(make_splunkhome_path(['etc', 'apps', 'SA-ITOA', 'lib']))

from ITOA.setup_logging import setup_logging
from itsi.event_management.sdk.custom_event_action_base import CustomEventActionBase

OPSGENIE_LOG_FILE = 'opsgenie_itsi.log'
OPSGENIE_URL = "opsgenie_url"

CORRELATION_KEYS = [
    'alert_level',
    'alert_severity',
    'alert_value',
    'alert_color',
    'alert_period',
    'all_info',
    'change_type',
    'composite_kpi_id',
    'composite_kpi_name',
    'color',
    'description',
    'drilldown_search_search',
    'drilldown_uri',
    'enabled',
    'entity_title',
# CORE SPLUNK IMPORTS (not needed)
# import splunk
# import splunk.search as splunkSearch
# from splunk.rest import simpleRequest
# import splunk.version as ver
# import splunk.clilib.cli_common
# import splunk.auth, splunk.search
# import splunk.Intersplunk as si

import sys

try:
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

sys.path.append(make_splunkhome_path(["etc", "apps", "Splunk_SA_CIM", "lib"]))
sys.path.append(make_splunkhome_path(["etc", "apps", "SA-Utils", "lib"]))
sys.path.append(
    make_splunkhome_path(["etc", "apps", "Splunk_TA_paloalto", "bin", "lib"]))
sys.path.append(
    make_splunkhome_path([
        "etc", "apps", "Splunk_TA_paloalto", "bin", "lib", "pan-python", "lib"
    ]))
sys.path.append(
    make_splunkhome_path(
        ["etc", "apps", "Splunk_TA_paloalto", "bin", "lib", "pandevice"]))

from cim_actions import ModularAction
import common
from pandevice.firewall import Firewall
import logging
import os
import sys

import splunk.clilib.cli_common
import splunk.version
from splunk.clilib.bundle_paths import get_slaveapps_base_path
from splunk.models.app import App
from xmloutput import setupLogger

from .fields import (BooleanField, Field, FieldValidationException,
                     IntervalField)

try:
    from splunk.clilib.bundle_paths import make_splunkhome_path
except ImportError:
    from splunk.appserver.mrsparkle.lib.util import make_splunkhome_path

if 'slave' in splunk.clilib.cli_common.getMergedConf('server').get(
        'clustering', {}).get('mode', {}):
    sys.path.append(os.path.join(get_slaveapps_base_path(), "@appname@", "lib"))
else:
    sys.path.append(make_splunkhome_path(["etc", "apps", "@appname@", "lib"]))

# Define logger using the name of the script here, versus in the modular_input class.
# logger = log.setup_logger(name='python_modular_input', level=logging.INFO)
logger = setupLogger(
    logger=None,
    log_format='%(asctime)s %(levelname)s [ModularInput] %(message)s',
    level=logging.INFO,
    log_name="python_modular_input.log",
    logger_name="modinput")


class ModularInputConfig(object):

    def __init__(self, server_host, server_uri, session_key, checkpoint_dir,
                 configuration):
        self.server_host = server_host
        self.server_uri = server_uri
        self.session_key = session_key
        self.checkpoint_dir = checkpoint_dir
        self.configuration = configuration
# Prune directories from other apps so that we don't step on each other with
# our imports (see http://lukemurphey.net/issues/1281)
paths_to_remove = []

for path in sys.path:
    if ('/etc/apps/' in path and not '/etc/apps/google_drive' in path) or (
            '\\etc\\apps\\' in path and not '\\etc\\apps\\google_drive' in path):
        paths_to_remove.append(path)

for path in paths_to_remove:
    sys.path.remove(path)

# Remove the httplib2 library since this causes issues
# (https://lukemurphey.net/issues/2540)
try:
    shutil.rmtree(
        make_splunkhome_path([
            'etc', 'apps', 'google_drive', 'bin', 'google_drive_app',
            'httplib2'
        ]))
except OSError:
    # The library doesn't exist; that's ok
    pass

# Add the imports
# Put the google_drive_app app first so that the app uses the newer version of
# requests which gspread expects
sys.path.insert(
    0,
    make_splunkhome_path(
        ['etc', 'apps', 'google_drive', 'bin', 'google_drive_app']))
sys.path.append(
    make_splunkhome_path([
        'etc', 'apps', 'google_drive', 'bin', 'google_drive_app',
        'oauth2client'
Copyright (C) 2005 - 2015 Splunk Inc. All Rights Reserved.
'''
import datetime
import json
import logging
import os
import re
import shutil
import sys
import time

import splunk
import splunk.rest
import splunk.util
from splunk.clilib.bundle_paths import make_splunkhome_path

sys.path.append(make_splunkhome_path(["etc", "apps", "SA-Utils", "lib"]))

from SolnCommon.log import setup_logger
from SolnCommon.lookups import get_temporary_checkpoint_file
from SolnCommon.modinput import ModularInput
from SolnCommon.modinput.fields import Field
from SolnCommon.modinput.fields import IntegerField
from SolnCommon.modinput.fields import RangeField
from SolnCommon.protocols import HttpProtocolHandler
from SolnCommon.protocols import NoopProtocolHandler
from SolnCommon.credentials import CredentialManager

from taxii_client import TaxiiHandler, TaxiiHandlerException


class ThreatlistModularInput(ModularInput):

    def __init__(self):
""" This file is used for xMatters Splunk Alert Actions """ import sys import json # pylint: disable = import-error # pylint: disable = wrong-import-position from splunk.clilib.bundle_paths import make_splunkhome_path sys.path.append( make_splunkhome_path(['etc', 'apps', 'xmatters_alert_action', 'lib'])) from common_utils.setup_logging import setup_logging from common_utils.password import get_password from xmatters_sdk.xm_event import XMattersEvent from xmatters_sdk.xm_client import XMattersClient # pylint: enable = wrong-import-position # pylint: enable = import-error # The name of the log file to write to XM_ALERT_ACTION_LOG = 'xmatters_alert_action.log' # The keys from the alert to send to the xMatters Event KEYS = [ 'app', 'cron.schedule', 'description', 'name', 'next_scheduled_time', 'owner', 'results_link', 'search', 'trigger.date', 'trigger.time', 'type', 'view_link', 'alert.expires', 'result.source', 'result.host', 'result.sourcetype', 'result.index', 'result.timestamp', 'result.DiskSpacePrct', 'result.source', 'result.splunk_server' ]