Example #1
class snowIncidentCommand(GeneratingCommand):

    assigned = Option(require=True, validate=validators.List())
    assigned_by = Option(require=False)
    daysAgo = Option(require=False, validate=validators.Integer(0))
    active = Option(require=True, validate=validators.Boolean())
    limit = Option(require=False, validate=validators.Integer(0))
    env = Option(require=False)

    def generate(self):
        self.logger.debug('snowIncidentCommand: %s', self)
        searchinfo = self.metadata.searchinfo
        app = AppConf(searchinfo.splunkd_uri, searchinfo.session_key)
        env = self.env.lower() if self.env else 'production'
        conf = app.get_config('getsnow')[env]
        assigned_by = 'assignment_group' if self.assigned_by == 'group' else 'assigned_to'
        assignment = {'table': 'sys_user_group', 'field': 'name'} if self.assigned_by == 'group' else {'table': 'sys_user', 'field': 'user_name'}
        limit = self.limit if self.limit else 10000
        snowincident = snow(conf['url'], conf['user'], conf['password'])
        sids = snowincident.getsysid(assignment['table'], assignment['field'], self.assigned)
        filters = snowincident.filterbuilder(assigned_by, sids)
        glide = 'sys_created_on>=javascript:gs.daysAgo({})'.format(self.daysAgo) if self.daysAgo else ''
        url = snowincident.reqencode(filters, table='incident', glide_system=glide, active=self.active, sysparm_limit=limit)
        for record in snowincident.getrecords(url):
            record = snowincident.updatevalue(record, sourcetype='snow:incident')
            record['_raw'] = json.dumps(record)
            record = dictexpand(record)
            yield record
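The generating-command examples in this listing show only the class body; each one also assumes the standard splunklib module wrapper (imports, a Configuration decorator, and a dispatch call), plus the example's own helpers such as json, snow, AppConf and dictexpand. A minimal sketch of that wrapper, assuming the usual splunklib.searchcommands pattern; the decorator arguments and entry-point guard below are not part of the original example:

import sys
from splunklib.searchcommands import dispatch, GeneratingCommand, Configuration, Option, validators

@Configuration()
class snowIncidentCommand(GeneratingCommand):
    # Options and generate() as defined in the example above.
    pass

if __name__ == '__main__':
    dispatch(snowIncidentCommand, sys.argv, sys.stdin, sys.stdout, __name__)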
Example #2
    def test_list(self):

        validator = validators.List()
        self.assertEqual(validator.__call__(''), [])
        self.assertEqual(validator.__call__('a,b,c'), ['a', 'b', 'c'])
        self.assertRaises(ValueError, validator.__call__, '"a,b,c')

        self.assertEqual(validator.__call__([]), [])
        self.assertEqual(validator.__call__(None), None)

        validator = validators.List(validators.Integer(1, 10))
        self.assertEqual(validator.__call__(''), [])
        self.assertEqual(validator.__call__('1,2,3'), [1, 2, 3])
        self.assertRaises(ValueError, validator.__call__, '1,2,0')

        self.assertEqual(validator.__call__([]), [])
        self.assertEqual(validator.__call__(None), None)
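The assertions above exercise validators.List directly; inside a command, the same validator converts the raw comma-separated option string into a Python list before generate() or stream() runs. A hypothetical sketch (the command and option names below are illustrative, not from the original examples):

from splunklib.searchcommands import Configuration, GeneratingCommand, Option, validators

@Configuration()
class EchoHostsCommand(GeneratingCommand):
    # Running `| echohosts hosts="web01,web02"` parses self.hosts into ['web01', 'web02'].
    hosts = Option(require=True, validate=validators.List())

    def generate(self):
        for host in self.hosts:
            yield {'_raw': host, 'host': host}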
Example #3
class GenerateCurrentWeatherCommand(GeneratingCommand):

    locations = Option(require=True, validate=validators.List())

    def generate(self):
        self.logger.debug("Generating %s events" % self.locations)
        n = 1
        for i in self.locations:
            Location = location(i, TomTomKey)
            CurrentWeather = currentweather(Location, WeatherKey)
            Lon = "longitude=\"" + str(Location.LocationLon) + "\" "
            Lat = "latitude=\"" + str(Location.LocationLat) + "\" "
            Address = "address=\"" + str(Location.Address) + "\" "
            Temp = "temperature=\"" + str(CurrentWeather.Temperature) + "\""
            Clouds = "clouds=\"" + str(CurrentWeather.Cloudiness) + "\""
            Rain = "rain=\"" + str(CurrentWeather.Rainfall) + "\""
            text = Lon + Lat + Address + Temp + Clouds + Rain
            yield {'_time': time.time(), 'event_no': n, '_raw': text}
            n += 1
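GenerateCurrentWeatherCommand packs everything into a hand-built _raw string of key="value" pairs. Since yielded records are plain dictionaries, the same data could also be emitted as separate fields; a sketch of that alternative, reusing the attribute names from the example (not part of the original command):

import json
import time

def build_weather_record(event_no, loc, weather):
    # Sketch only: emit structured fields instead of a concatenated _raw string.
    record = {
        '_time': time.time(),
        'event_no': event_no,
        'longitude': loc.LocationLon,
        'latitude': loc.LocationLat,
        'address': loc.Address,
        'temperature': weather.Temperature,
        'clouds': weather.Cloudiness,
        'rain': weather.Rainfall,
    }
    record['_raw'] = json.dumps(record)
    return record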
Example #4
class snowUserCommand(GeneratingCommand):

    user_name = Option(require=True, validate=validators.List())
    daysAgo = Option(require=False, validate=validators.Integer(0))
    env = Option(require=False)

    def generate(self):
        self.logger.debug('snowuser: %s', self)
        searchinfo = self.metadata.searchinfo
        app = AppConf(searchinfo.splunkd_uri, searchinfo.session_key)
        env = self.env.lower() if self.env else 'production'
        conf = app.get_config('getsnow')[env]
        snowuser = snow(conf['url'], conf['user'], conf['password'])
        filters = snowuser.filterbuilder('user_name', self.user_name)
        query_string = snowuser.reqencode(filters, 'sys_user')
        user_sid = []
        for record in snowuser.getrecords(query_string):
            user_sid.append(record['sys_id'])
            record = snowuser.updatevalue(record, sourcetype='snow:user')
            record['_raw'] = json.dumps(record)
            record = dictexpand(record)
            yield record
        filters = snowuser.filterbuilder('assigned_to', user_sid)
        url = snowuser.reqencode(filters, table='alm_asset')
        for record in snowuser.getrecords(url):
            record = snowuser.updatevalue(record, sourcetype='snow:asset')
            record['_raw'] = json.dumps(record)
            record = dictexpand(record)
            yield record
        filters = snowuser.filterbuilder('opened_by', user_sid)
        glide = 'sys_created_on>=javascript:gs.daysAgo({})'.format(
            self.daysAgo) if self.daysAgo else ''
        url = snowuser.reqencode(filters, table='incident', glide_system=glide)
        for record in snowuser.getrecords(url):
            record = snowuser.updatevalue(record, sourcetype='snow:incident')
            record['_raw'] = json.dumps(record)
            record = dictexpand(record)
            yield record
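Both ServiceNow commands repeat the pattern of serializing each record into _raw and then passing it through dictexpand(). That helper comes from the getsnow app and is not shown here; a plausible sketch of what such a flattener does, offered as an assumption rather than the app's actual implementation:

def dictexpand(item, key=None):
    # Hypothetical sketch: flatten nested dicts so {'a': {'b': 1}} becomes
    # {'a.b': 1}, which Splunk then surfaces as the field a.b.
    flat = {}
    for name, value in item.items():
        path = '%s.%s' % (key, name) if key else name
        if isinstance(value, dict):
            flat.update(dictexpand(value, path))
        else:
            flat[path] = value
    return flat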
Example #5
class EMGroupEntityMatchCommand(StreamingCommand):
    """ Match groups and entities based on group filter and entity dimensions

    ##Syntax

    .. code-block::
        emgroupentitymatch selectedGroupIds="states,aws_instances" retainInput=false

    ##Description
        This custom search command adds 'group_id' and 'group_title' to every input
        entity record that is a member of a group; records that match no group are omitted from
        the results unless retainInput is 'true'.

        Options:
        1. selectedGroupIds -- indicates the selected groups that you want to match against the entities
        2. retainInput -- indicates if the original input records should be attached to the output records
                          if true, those records will have 'group_id' and 'group_title' set to 'N/A' for you
                          to distinguish them.

    ##Example

    .. code-block::
        | inputlookup em_entities
        | emgroupentitymatch selectedGroupIds="states,aws_instances" retainInput=false
        | stats count by group_title

    """

    _group_records = None

    selected_group_ids = Option(doc='List of selected group ids, separated by comma.',
                                name='selectedGroupIds',
                                default=None,
                                require=False,
                                validate=validators.List())
    retain_input_record = Option(doc='Boolean to indicate if user wants the input '
                                     'record to be added to the output without modification.',
                                 name='retainInput',
                                 default=False,
                                 require=False,
                                 validate=validators.Boolean())

    def stream(self, records):
        """
        Generator function that processes and yields event records to the Splunk stream pipeline.
        :param records: splunk event records
        :return:
        """
        self._setup_group_records()
        self.logger.debug('EMGroupEntityMatchCommand: %s', self)  # logs command line
        for record in records:
            if self.retain_input_record:
                record['group_id'] = 'N/A'
                record['group_title'] = 'N/A'
                yield record
            if len(self._group_records) > 0:
                for group_record in self._group_records:
                    if self._match_group_entity(record, group_record.group_filter):
                        record['group_id'] = group_record.group_id
                        record['group_title'] = group_record.group_content.get('title')
                        yield record
            else:
                yield record

    def _setup_group_records(self):
        """
        Grabs the groups from KV Store and builds out the filter objects if they have yet to be built
        :return: None
        """
        if self._group_records is None:
            collection = self.service.kvstore[STORE_GROUPS]
            group_data = collection.data.query()
            if self.selected_group_ids:
                selected_group_set = set(self.selected_group_ids)
                group_data = filter(lambda g: g['_key'] in selected_group_set, group_data)
            group_records = []
            for group in group_data:
                filter_val = group.get('filter')
                d = {}
                if filter_val:
                    for v in filter_val.split(','):
                        dim_name, dim_val = v.strip().split('=')
                        d.setdefault(dim_name, set()).add(dim_val)
                group_records.append(GroupRecord(group_id=group['_key'],
                                                 group_filter=d,
                                                 group_content=group))
            self._group_records = group_records

    def _match_group_entity(self, record, filter_dict):
        """
        Verify whether this record matches to group filters.
        Support wildcard in end of string
        :param record:
        :param filter_dict:
        :return:
        """
        for dim_name, dim_val_set in filter_dict.iteritems():
            record_vals = record.get('dimensions.%s' % dim_name)
            if not record_vals:
                return False
            if not isinstance(record_vals, list):
                record_vals = [record_vals]
            matched = False
            for rval in record_vals:
                # check if record value is one of the filter values
                if rval in dim_val_set:
                    matched = True
                    break
                # otherwise check if record value matches any of the fuzzy match values
                fuzzy_matches = filter(lambda v: v.endswith('*'), dim_val_set)
                if len(fuzzy_matches):
                    matched = any(rval.startswith(v[:-1]) for v in fuzzy_matches)
            if not matched:
                return False
        return True
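_match_group_entity treats a filter value that ends in '*' as a prefix match and any other value as an exact match. A standalone sketch of that rule with illustrative values:

def value_matches(record_value, filter_values):
    # Exact match first, then prefix match for filter values ending in '*'.
    if record_value in filter_values:
        return True
    prefixes = [v[:-1] for v in filter_values if v.endswith('*')]
    return any(record_value.startswith(p) for p in prefixes)

# A group filter of "host=web-*" matches an entity whose dimensions.host is "web-03".
assert value_matches('web-03', {'web-*'})
assert not value_matches('db-01', {'web-*'})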
Example #6
class MatchupCommandRex(StreamingCommand):
    """
    SCP v2
    """

    opt_outputcols = Option(doc='''
        **Syntax:** **outputcols=***<CSV list of fields>*
        **Description:** The field(s) that should be returned from the table.
        **Default:** <empty> returns all fields''',
                            name='outputcols',
                            require=False,
                            default='',
                            validate=validators.List())

    opt_regex = Option(doc='''
        **Syntax:** **regex=***<regex field in lookup>*
        **Description:** Names the field in the lookup that stores the regex.
        **Default:** none''',
                       name='regex',
                       require=True)

    opt_field = Option(doc='''
        **Syntax:** **field=***<field to match>*
        **Description:** Names the field in the event that is matched to the regex.
        **Default:** none''',
                       name='field',
                       require=True)

    opt_lookup = Option(doc='''
        **Syntax:** **lookup=***<lookup_name>*
        **Description:** name of a valid accessible lookup
        **Default:** none''',
                        name='lookup',
                        require=True)

    opt_where = Option(doc='''
        **Syntax:** **where=***<valid where clause>*
        **Description:** where clause that can be used with inputlookup to restrict the rows returned and improve performance
        **Default:** none''',
                       name='where',
                       default=None,
                       require=False)

    opt_sort = Option(doc='''
        **Syntax:** **sort=***<string>*
        **Description:** SPL-style sort string, e.g. '- order'
        **Default:** empty string''',
                      name='sort',
                      default=None,
                      require=False)

    opt_undefined_str = Option(doc='''
        **Syntax:** **undefined=***<string>*
        **Description:** string to use if the column doesn't exist in the output
        **Default:** undefined''',
                               name='undefined',
                               default='undefined',
                               require=False)

    table = None

    def __init__(self):
        super(MatchupCommandRex, self).__init__()
        self.perf = Perf(self.logger)

    def get_where_clause(self):
        where = self.opt_where
        self.logger.info("option where is (%s)" % where)
        if where is not None:
            return " WHERE " + where
        return ""

    def get_sort_clause(self):
        sort = ""
        if self.opt_sort is not None:
            sort = " | sort " + self.opt_sort
            self.logger.info("option sort is (%s)" % sort)
        else:
            self.logger.info("option sort is empty")
        return sort

    def get_table_fast_and_raw(self):
        if self.table is None:
            import requests, json
            self.perf.start("get_table_raw")

            res = requests.post(
                "%s/services/search/jobs/export" %
                (self.metadata.searchinfo.splunkd_uri),
                verify=False,
                headers={
                    "Authorization":
                    "Splunk %s" % self.metadata.searchinfo.session_key,
                    "Accept-Encoding": "gzip,deflate",
                    "User-Agent": "Python Requests",
                    "Content-Type": "application/json; charset=UTF-8",
                    "Accept": "application/json"
                },
                data={
                    "exec_mode":
                    "oneshot",
                    "output_mode":
                    "json",
                    "search":
                    "| inputlookup {} {} {}".format(self.opt_lookup,
                                                    self.get_where_clause(),
                                                    self.get_sort_clause())
                })
            table = MatchTableRex(self.opt_regex, self.logger)
            res.raise_for_status()
            for line in res.text.strip().split("\n"):
                table.add_row(json.loads(line)["result"])
                self.logger.info("Line is {}".format(line))
            self.perf.end("get_table_raw", "%d Rows" % table.len())
            self.table = table
        return self.table

    def stream(self, records):
        """
        Main entry point for the command
        :param records: An iterable stream of events from the command pipeline.
        :return: `None`.
        """
        self.logger.info("================== Stream")
        sz = 0
        self.perf.start("full stream method")
        for record in records:
            sz += 1
            perf_str = "checking row {}".format(sz)
            tbl = self.get_table_fast_and_raw()
            self.perf.start(perf_str)
            match = tbl.get_match(self.opt_field, record)
            if match is not None:
                for col in self.opt_outputcols:
                    try:
                        record[col] = match[col]
                    except KeyError:
                        record[col] = self.opt_undefined_str
            else:
                self.logger.warn("No match for row {}".format(record))
                record["debug"] = "nothing matched this record"
            self.perf.end(perf_str)
            yield record

        self.logger.info("Iterated over {} records".format(sz))
        self.perf.end("full stream method", "{} events processed".format(sz))
Example #7
class MACFormatCommand(StreamingCommand):
    """ Convert a given MAC address field to specified format.

    ##Syntax

    .. code-block::
        | macformat input=field-list output=field-list format=[cisco|dash|ieee|none]

    ## Description

    Convert the fields in the `inputs` field list to the ones in the `outputs` list; both lists are
    optional. The `inputs` list defaults to `macaddress`. The `outputs` list is padded with fields
    from the `inputs` list if it is shorter than the `inputs` list.

    The `format` option is one of [cisco|dash|ieee|none]. The default is `none`.

    Raises a ValueError exception if the MAC address is invalid.
    """
    format = Option(doc='''
        **Syntax:** **format=**`[cisco|dash|ieee|none]`
        **Description:** Format of the output MAC address. Defaults to `none`.''',
                    require=False,
                    validate=validators.Set('cisco', 'dash', 'ieee', 'none'))

    inputs = Option(doc='''
        **Syntax:** **inputs=***<field-list>*
        **Description:** A comma-delimited list of input fields to convert. Defaults to `macaddress`.''',
                    require=False,
                    validate=validators.List())

    outputs = Option(doc='''
        **Syntax:** **outputs=***<field-list>*
        **Description:** A comma-delimited list of fields for the results. Defaults to `inputs`.''',
                     require=False,
                     validate=validators.List())

    def prepare(self):
        """ Prepare the options.

        :return: :const:`None`
        :rtype: NoneType
        """
        self.toform = globals()['_' + (self.format or self.def_format)]
        inputs = self.inputs
        if inputs is None:
            self.inputs = inputs = self.def_inputs
        outputs = self.outputs
        if outputs is None:
            outputs = inputs
        elif len(outputs) < len(inputs):
            outputs += inputs[len(outputs):]
        self.outputs = outputs
        self.logger.debug(
            'MACFormatCommand.prepare: inputs = %s, outputs = %s', self.inputs,
            outputs)

    def stream(self, records):
        toform = self.toform
        inputs = self.inputs
        outputs = self.outputs
        if outputs is None:
            outputs = inputs
        elif len(outputs) < len(inputs):
            outputs += inputs[len(outputs):]
        for record in records:
            self.logger.debug('MACFormatCommand: record = %s', record)
            for i in range(len(inputs)):
                mac = record.get(inputs[i])
                if mac is not None:
                    try:
                        record[outputs[i]] = toform(mac)
                    except Exception as err:
                        record[outputs[i]] = mac
                        self.logger.error('(input=%s) %s', inputs[i],
                                          err.message)
            yield record

    def __init__(self):
        StreamingCommand.__init__(self)
        appdir = path.dirname(path.dirname(__file__))
        defconfpath = path.join(appdir, "default", "app.conf")
        defconf = cli.readConfFile(defconfpath).get('macformat') or {}
        localconfpath = path.join(appdir, "local", "app.conf")
        localconf = (cli.readConfFile(localconfpath).get('macformat')
                     or {}) if path.exists(localconfpath) else {}
        self.def_format = localconf.get('format') or defconf.get(
            'format') or 'none'
        inputs = localconf.get('inputs') or defconf.get('inputs')
        self.def_inputs = re.split(r'[\s,]',
                                   inputs) if inputs else ['macaddress']
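prepare() resolves the formatter by name from the module globals, so the module is expected to define _cisco, _dash, _ieee, and _none functions that are not shown in this example. A hypothetical sketch of those formatters; the exact output conventions are assumptions:

import re

def _hexdigits(mac):
    digits = re.sub(r'[^0-9a-fA-F]', '', mac)
    if len(digits) != 12:
        raise ValueError('invalid MAC address: %s' % mac)
    return digits.lower()

def _none(mac):
    _hexdigits(mac)  # validate only, keep the original form
    return mac

def _cisco(mac):
    d = _hexdigits(mac)
    return '.'.join(d[i:i + 4] for i in range(0, 12, 4))  # aaaa.bbbb.cccc

def _dash(mac):
    d = _hexdigits(mac).upper()
    return '-'.join(d[i:i + 2] for i in range(0, 12, 2))  # AA-BB-CC-DD-EE-FF

def _ieee(mac):
    d = _hexdigits(mac).upper()
    return ':'.join(d[i:i + 2] for i in range(0, 12, 2))  # AA:BB:CC:DD:EE:FF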
Example #8
class LdapFetchCommand(StreamingCommand):
    """  Filters and augments events with information from Active Directory.

    This command follows a search or similar command in the pipeline so that you can feed it events:

        .. code-block:: text
        | ldapsearch domain=splunk.com search="(objectClass=groups)"
        | ldapfetch domain=splunk.com dn=memberOf attributes="cn,description"

    """
    # region Command options

    attrs = Option(
        doc=''' Specifies a comma separated list of attributes to return as fields.
        **Default:** '*', specifying that all attributes should be returned as fields.
        ''',
        default=[ldap3.ALL_ATTRIBUTES], validate=validators.List())

    debug = Option(
        doc=''' True, if the logging_level should be set to DEBUG; otherwise False.
        **Default:** The current value of logging_level.
        ''',
        default=False, validate=validators.Boolean())

    decode = Option(
        doc=''' True, if Active Directory formatting rules should be applied to attribute types.
        **Default:** The value of decode as specified in the configuration stanza for domain.
        ''',
        default=True, validate=validators.Boolean())

    dn = Option(
        doc=''' Specifies the name of the field holding the distinguished name to fetch.
        ''',
        default='distinguishedName')

    domain = Option(
        doc=''' Specifies the Active Directory domain to search.
        ''',
        default='default')

    # endregion

    # region Command implementation

    def stream(self, records):
        """
        :param records: An iterable stream of events from the command pipeline.
        :return: `None`.

        """
        configuration = app.Configuration(self, is_expanded=True)
        expanded_domain = app.ExpandedString(self.domain)
        search_scope = ldap3.SEARCH_SCOPE_BASE_OBJECT
        search_filter = '(objectClass=*)'

        try:
            with configuration.open_connection_pool(self.attrs) as connection_pool:
                attribute_names = connection_pool.attributes
                for record in records:
                    dn = record.get(self.dn)
                    if not dn:  # got a falsey value
                        yield record
                        continue
                    domain = expanded_domain.get_value(record)
                    if domain is None:
                        yield record
                        continue
                    connection = connection_pool.select(domain)
                    if not connection:
                        self.logger.warning('dn="%s": domain="%s" is not configured', self.dn, domain)
                        yield record
                        continue
                    for search_base in dn if isinstance(dn, list) else (dn,):
                        if search_base:
                            try:
                                connection.search(search_base, search_filter, search_scope, attributes=attribute_names)
                            except ldap3.LDAPNoSuchObjectResult:
                                self.logger.warning(
                                    'dn="%s" domain="%s": distinguishedName="%s" does not exist', self.dn, domain,
                                    search_base)
                            else:
                                response = connection.response[0]
                                attributes = app.get_attributes(self, connection.response[0])
                                if attributes:
                                    self._augment_record(record, response['dn'], attributes, attribute_names)
                        yield record
                    pass

        except ldap3.LDAPException as error:
            self.error_exit(error, app.get_ldap_error_message(error, configuration))

        return

    def _augment_record(self, record, dn, attributes, attribute_names):
        """
        :param record:
        :param dn:
        :param attributes:
        :return:

        """
        record[self.dn] = dn
        for name in attribute_names:
            record[name] = attributes.get(name, '')
        return
Example #9
class MatchupCommand(StreamingCommand):
    """
    SCP v2
    """

    opt_outputcols = Option(doc='''
        **Syntax:** **outputcols=***<CSV list of fields>*
        **Description:** The field(s) that should be returned from the table.
        **Default:** <empty> returns all fields''',
                            name='outputcols',
                            require=False,
                            default='',
                            validate=validators.List())

    opt_inputcols = Option(doc='''
        **Syntax:** **inputcols=***<CSV list of key fields to match>*
        **Description:** Each field in the list is matched against the lookup using the matchup rules; this does not behave like the built-in lookup command, so read the docs.
        **Default:** none''',
                           name='inputcols',
                           require=True,
                           validate=validators.List())

    opt_lookup = Option(doc='''
        **Syntax:** **lookup=***<lookup_name>*
        **Description:** name of a valid accessible lookup
        **Default:** none''',
                        name='lookup',
                        require=True)

    opt_where = Option(doc='''
        **Syntax:** **where=***<valid where clause>*
        **Description:** where clause that can be used with inputlookup to restrict the rows returned and improve performance
        **Default:** none''',
                       name='where',
                       default=None,
                       require=False)

    opt_undefined_str = Option(doc='''
        **Syntax:** **undefined=***<string>*
        **Description:** string to use if the column doesn't exist in the output
        **Default:** undefined''',
                               name='undefined',
                               default='undefined',
                               require=False)

    opt_debug = Option(doc='''
            **Syntax:** **debug=***<boolean>*
            **Description:** Whether the command should populate an error message in a debug field.
            **Default:** none''',
                       name='debug',
                       default='False',
                       require=False)

    table = None

    def __init__(self):
        super(MatchupCommand, self).__init__()
        self.perf = Perf(self.logger)
        self.barocstringformat = BarocStringFormat()

    def normalize_debug(self, debug):
        return debug.lower() in ['true', '1', 't', 'y', 'yes']

    def get_where_clause(self):
        where = self.opt_where
        self.logger.info("option where is (%s)" % where)
        if where is not None:
            return " WHERE " + where
        return ""

    def get_table_slow_with_splunklib(self, force=False):
        """
        Fetch the lookup here, once only.
        Not pretending this is an awesome, super-scalable solution, so for now we just read the whole file.
        Would be nice if we could use some static memory to store it as well; need FDSE/Eng input for that job, though.
        """
        if self.table is None:
            self.perf.start("load table")
            self.logger.info("================== Live call starting")

            search = "| inputlookup {} {} ".format(self.opt_lookup,
                                                   self.get_where_clause())
            self.logger.info("Lookup search is : {}".format(search))
            self.perf.start("rest call")
            self.logger.info("FARK : {}".format(self._metadata))

            res = self.service.jobs.oneshot(search, **{"count": 0})
            self.perf.end("rest call")
            # Get the results and display them using the ResultsReader
            reader = results.ResultsReader(res)
            self.table = MatchTable(self.opt_inputcols)
            self.perf.end("rest call", "part 2")
            rows = []
            i = 0
            for row in reader:
                #rows[i] = row
                #i = i+1
                pass

            self.perf.end("load table", "table size: " + str(self.table.len()))

        return self.table

    def get_table_fast_and_raw(self):
        if self.table is None:
            import requests, json
            self.perf.start("get_table_raw")

            res = requests.post(
                "%s/services/search/jobs/export" %
                (self.metadata.searchinfo.splunkd_uri),
                verify=False,
                headers={
                    "Authorization":
                    "Splunk %s" % self.metadata.searchinfo.session_key,
                    "Accept-Encoding": "gzip,deflate",
                    "User-Agent": "Python Requests",
                    "Content-Type": "application/json; charset=UTF-8",
                    "Accept": "application/json"
                },
                data={
                    "exec_mode":
                    "oneshot",
                    "output_mode":
                    "json",
                    "search":
                    "| inputlookup {} {} ".format(self.opt_lookup,
                                                  self.get_where_clause())
                })
            table = MatchTable(self.opt_inputcols)
            res.raise_for_status()
            for line in res.text.strip().split("\n"):
                table.add_row(json.loads(line)["result"])
            self.perf.end("get_table_raw", "%d Rows" % table.len())
            self.table = table
        return self.table

    def map_to_output_colums(self, match, record, debug='False'):
        if self.opt_debug:
            debug = self.normalize_debug(self.opt_debug)

        if match is not None:
            for col in self.opt_outputcols:
                try:
                    record[col] = self.barocstringformat.string_template(
                        match[col], record)
                    if debug:
                        record['debug'] = self.barocstringformat.debug_msg
                except KeyError:
                    record[col] = self.opt_undefined_str
        else:
            self.logger.warn("No match for row {}".format(record))
            record["debug"] = "nothing matched this record"
        return record

    def stream(self, records):
        """
        :param records: An iterable stream of events from the command pipeline.
        :return: `None`.
        """
        self.logger.info("================== Stream")
        sz = 0
        self.perf.start("full stream method")
        for record in records:
            sz += 1
            perf_str = "checking row {}".format(sz)
            tbl = self.get_table_fast_and_raw()
            self.perf.start(perf_str)
            match = tbl.match_table(record)
            record = self.map_to_output_colums(match, record)
            self.perf.end(perf_str)
            yield record

        self.logger.info("Iterated over {} records".format(sz))
        self.perf.end("full stream method", "{} events processed".format(sz))
Example #10
class LdapFilterCommand(StreamingCommand):
    """  Filters and augments events with information from Active Directory.

    This command follows a search or similar command in the pipeline so that you can feed it events:

        .. code-block:: text
        eventtype=msad-user-logons
        | ldapfilter domain=$dest_nt_domain$ search="(objectClass=$src_user$)" attrs="telephoneNumber,displayName"

    """
    # region Command options

    search = Option(
        doc=''' Specifies an RFC 2254 compliant search string.
        ''',
        require=True)

    domain = Option(
        doc=''' Specifies the Active Directory domain to search.
        ''',
        default='default')

    attrs = Option(
        doc=''' Specifies a comma separated list of attributes to return as fields.
        **Default:** '*', specifying that all attributes should be returned as fields.
        ''',
        default=[ldap3.ALL_ATTRIBUTES], validate=validators.List())

    basedn = Option(
        doc=''' Specifies the starting point for the search.
        Default: The value of `basedn` as specified in the configuration stanza for `domain`.
        ''')

    scope = Option(
        doc=''' Specifies the scope of the search to be one of `base`, `one`, or `sub`.
        **Default:** sub.
        ''',
        default='sub', validate=validators.Map(
            base=ldap3.SEARCH_SCOPE_BASE_OBJECT,
            one=ldap3.SEARCH_SCOPE_SINGLE_LEVEL,
            sub=ldap3.SEARCH_SCOPE_WHOLE_SUBTREE
        ))

    decode = Option(
        doc=''' True, if Active Directory formatting rules should be applied to attribute types.
        **Default:** The value of decode as specified in the configuration stanza for domain.
        ''',
        default=True, validate=validators.Boolean())

    limit = Option(
        doc=''' Specifies an upper bound on the number of matching entries returned by the search.
        **Default:** 0, specifying that there is no upper bound on the number of entries returned by the search.
        ''',
        default=0, validate=validators.Integer(minimum=0))

    debug = Option(
        doc=''' True, if the logging_level should be set to DEBUG; otherwise False.
        **Default:** The current value of logging_level.
        ''',
        default=False, validate=validators.Boolean())

    # endregion

    # region Command implementation

    def stream(self, records):
        """
        :param records: An iterable stream of events from the command pipeline.
        :return: `None`.

        """
        configuration = app.Configuration(self, is_expanded=True)
        expanded_domain = app.ExpandedString(self.domain)
        expanded_search_filter = app.ExpandedString(self.search, converter=app.escape_assertion_value)

        try:
            with configuration.open_connection_pool(self.attrs) as connection_pool:

                for record in records:

                    domain = expanded_domain.get_value(record)

                    if domain is None:
                        continue

                    search_filter = expanded_search_filter.get_value(record)

                    if len(search_filter) == 0:
                        continue

                    connection = connection_pool.select(domain)

                    if not connection:
                        self.logger.warning('search="%s": domain="%s" is not configured', search_filter, domain)
                        continue

                    search_base = app.ExpandedString(self.basedn).get_value(record)  # must be instantiated here

                    entry_generator = connection.extend.standard.paged_search(
                        search_base=search_base, search_filter=search_filter, search_scope=self.scope,
                        attributes=connection_pool.attributes, paged_size=configuration.paged_size)

                    for entry in entry_generator:
                        attributes = app.get_attributes(self, entry)
                        if not attributes:
                            continue
                        for name in connection_pool.attributes:
                            record[name] = attributes.get(name, '')
                        yield record

                    pass

        except ldap3.LDAPException as error:
            self.error_exit(error, app.get_ldap_error_message(error, configuration))

        return
Example #11
class epbox(ReportingCommand):
    doc = '''
	**Syntax:**
	search | epbox target=<target alias> outputfile=<output path/filename> outputformat=[json|raw|kv|csv|tsv|pipe] fields="field1, field2, field3" compress=[true|false]

	**Description**
	Export Splunk events to Box in any format.
	'''

    # Define Parameters
    target = Option(doc='''
		**Syntax:** **target=***<target alias>*
		**Description:** Reference to a target Box app within the configuration
		**Default:** The target configured as "Default" within the setup page (if any)''',
                    require=False)

    outputfile = Option(doc='''
		**Syntax:** **outputfile=***<file path/file name>*
		**Description:** The name of the file to be written to Box
		**Default:** The name of the user plus the timestamp and the output format, e.g. admin_1588000000.log
			json=.json, csv=.csv, tsv=.tsv, pipe=.log, kv=.log, raw=.log''',
                        require=False)

    outputformat = Option(doc='''
		**Syntax:** **outputformat=***[json|raw|kv|csv|tsv|pipe]*
		**Description:** The format written for the output events/search results
		**Default:** *csv*''',
                          require=False)

    fields = Option(doc='''
		**Syntax:** **fields=***"field1, field2, field3"*
		**Description:** Limit the fields to be written to the file
		**Default:** All (Unspecified)''',
                    require=False,
                    validate=validators.List())

    compress = Option(doc='''
		**Syntax:** **compress=***[true|false]*
		**Description:** Option to compress the output file into .gz format before uploading
		**Default:** The setting from the target configuration, or True if .gz is in the filename ''',
                      require=False,
                      validate=validators.Boolean())

    # Validators found @ https://github.com/splunk/splunk-sdk-python/blob/master/splunklib/searchcommands/validators.py

    def __getitem__(self, key):
        return getattr(self, key)

    def map(self, events):
        for e in events:
            yield (e)

    #define main function
    def reduce(self, events):

        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_box')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('Box Export search command initiated')
        logger.debug('search_ep_box command: %s', self)  # logs command line

        # Enumerate proxy settings
        http_proxy = os.environ.get('HTTP_PROXY')
        https_proxy = os.environ.get('HTTPS_PROXY')
        proxy_exceptions = os.environ.get('NO_PROXY')

        if http_proxy is not None:
            logger.debug("HTTP proxy: %s" % http_proxy)
        if https_proxy is not None:
            logger.debug("HTTPS proxy: %s" % https_proxy)
        if proxy_exceptions is not None:
            logger.debug("Proxy Exceptions: %s" % proxy_exceptions)

        # Enumerate settings
        app = self._metadata.searchinfo.app
        user = self._metadata.searchinfo.username
        dispatch = self._metadata.searchinfo.dispatch_dir
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        try:
            target_config = get_config_from_alias(session_key, cmd_config,
                                                  self.target)
            if target_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)
            logger.debug("Target configuration: " + str(target_config))
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        file_extensions = {
            'raw': '.log',
            'kv': '.log',
            'pipe': '.log',
            'csv': '.csv',
            'tsv': '.tsv',
            'json': '.json'
        }

        if self.outputformat is None:
            self.outputformat = 'csv'

        # Create the default filename
        now = str(int(time.time()))
        default_filename = ('export_' + user + '___now__' +
                            file_extensions[self.outputformat]).strip("'")

        # Split the output into folder and filename
        if self.outputfile is not None:
            folder_list = self.outputfile.split('/')
            if len(folder_list) == 1:
                # No folder specified, use the default
                use_default_folder = True
                filename = folder_list[0]
            elif folder_list[0] == '':
                # Length > 1, outputfile points to the root folder (leading /)
                use_default_folder = False
            else:
                # Length > 1 and outputfile points to a relative path (no leading /)
                use_default_folder = True

            if len(folder_list) > 1 and folder_list[-1] == '':
                # No filename provided, trailing /
                filename = default_filename
                folder_list.pop()
            elif len(folder_list) > 1 and len(folder_list[-1]) > 0:
                filename = folder_list[-1]
                folder_list.pop()
        else:
            use_default_folder = True
            filename = default_filename
            folder_list = []

        if use_default_folder:
            if 'default_folder' in list(target_config.keys()):
                # Use the configured default folder
                folder_list = target_config['default_folder'].strip('/').split(
                    '/') + folder_list
            else:
                # Use the root folder
                folder_list = ['']

        # Replace keywords from output filename and folder
        folder = replace_keywords('/'.join(folder_list))
        filename = replace_keywords(filename)
        logger.debug("Folder = " + folder)
        logger.debug("Filename = " + filename)

        if self.compress is not None:
            logger.debug('Compression: %s', self.compress)
        else:
            try:
                self.compress = target_config.get('compress')
            except:
                self.compress = False

        # Use the random number to support running multiple outputs in a single search
        random_number = str(random.randint(10000, 100000))
        staging_filename = 'export_everything_staging_' + random_number + '.txt'
        local_output_file = os.path.join(dispatch, staging_filename)
        if self.compress:
            local_output_file = local_output_file + '.gz'
        logger.debug("Staging file: %s" % local_output_file)

        # Append .gz to the output file if compress=true
        if not self.compress and len(filename) > 3:
            if filename[-3:] == '.gz':
                # We have a .gz extension when compression was not specified. Enable compression.
                self.compress = True
        elif self.compress and len(filename) > 3:
            if filename[-3:] != '.gz':
                filename = filename + '.gz'

        #if auth is not None:

        # Use the credential to connect to Box
        try:
            client = get_box_connection(target_config)
        except BaseException as e:
            exit_error(logger, "Could not connect to box: " + repr(e))

        subfolders = folder.strip('/').split('/')
        if '' in subfolders:
            subfolders.remove('')
        logger.debug("Folders: %s" % str(subfolders))
        # Prepend the list with the root element
        box_folder_object = client.root_folder().get()
        # Walk the folder path until we find the target directory
        for subfolder_name in subfolders:
            logger.debug("Looking for folder: %s" % subfolder_name)
            # Get the folder ID for the string specified from the list of child subfolders
            # folder object is from the previous iteration
            folder_contents = box_folder_object.get_items()
            folder_found = False
            for item in folder_contents:
                if item.type == 'folder':
                    #logger.debug('{0} {1} is named "{2}"'.format(item.type.capitalize(), item.id, item.name))
                    if subfolder_name == item.name:
                        logger.debug("Found a target folder ID: %s" %
                                     str(item.id))
                        box_folder_object = client.folder(folder_id=item.id)
                        folder_found = True
            if not folder_found:
                # Create the required subfolder
                box_folder_object = box_folder_object.create_subfolder(
                    subfolder_name)

        try:
            event_counter = 0
            # Write the output file to disk in the dispatch folder
            logger.debug(
                "Writing events to dispatch file. file=\"%s\" format=%s compress=%s fields=%s",
                local_output_file, self.outputformat, self.compress,
                self.fields)
            for event in event_file.write_events_to_file(
                    events, self.fields, local_output_file, self.outputformat,
                    self.compress):
                yield event
                event_counter += 1

        except BoxAPIException as be:
            exit_error(logger, be.message, 833928)
        except BaseException as e:
            exit_error(logger, "Error writing file to upload", 398372)

        try:
            new_file = box_folder_object.upload(local_output_file,
                                                file_name=filename)
            message = "Box Export Status: Success. File name: %s, File ID: %s" % (
                new_file.name, new_file.id)
            eprint(message)
            logger.info(message)
        except BaseException as e:
            exit_error(logger, "Error uploading file to Box: " + repr(e),
                       109693)
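The default filename concatenates 'export_' + user + '___now__' plus an extension and later runs through replace_keywords(), a helper imported by the app but not shown here; per the option docstring, the __now__ token is replaced with the epoch timestamp. A hypothetical sketch of that substitution:

import time

def replace_keywords(text):
    # Hypothetical: substitute the __now__ token with the current epoch time, so
    # 'export_admin___now__.csv' becomes something like 'export_admin_1588000000.csv'.
    return text.replace('__now__', str(int(time.time())))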
Example #12
class epsmb(ReportingCommand):
    doc = '''
	**Syntax:**
	search | epsmb target=<target host alias> outputfile=<output path/filename> outputformat=[json|raw|kv|csv|tsv|pipe] fields="field1, field2, field3" compress=[true|false]

	**Description**
	Export Splunk events to an SMB server share in any format.
	'''

    # Define Parameters
    target = Option(doc='''
		**Syntax:** **target=***<target_host_alias>*
		**Description:** Reference to a target SMB share within the configuration
		**Default:** The target configured as "Default" within the setup page (if any)''',
                    require=False)

    outputfile = Option(doc='''
		**Syntax:** **outputfile=***<file path/file name>*
		**Description:** The name of the file to be written remotely
		**Default:** The name of the user plus the timestamp and the output format, e.g. admin_1588000000.log
			json=.json, csv=.csv, tsv=.tsv, pipe=.log, kv=.log, raw=.log''',
                        require=False)

    outputformat = Option(doc='''
		**Syntax:** **outputformat=***[json|raw|kv|csv|tsv|pipe]*
		**Description:** The format written for the output events/search results
		**Default:** *csv*''',
                          require=False)

    fields = Option(doc='''
		**Syntax:** **fields=***"field1, field2, field3"*
		**Description:** Limit the fields to be written to the file
		**Default:** All (Unspecified)''',
                    require=False,
                    validate=validators.List())

    compress = Option(doc='''
		**Syntax:** **compress=***[true|false]*
		**Description:** Option to compress the output file into .gz format before uploading
		**Default:** The setting from the target configuration, or True if .gz is in the filename ''',
                      require=False,
                      validate=validators.Boolean())

    # Validators found @ https://github.com/splunk/splunk-sdk-python/blob/master/splunklib/searchcommands/validators.py

    def __getitem__(self, key):
        return getattr(self, key)

    def map(self, events):
        for e in events:
            yield (e)

    #define main function
    def reduce(self, events):

        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_smb')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('SMB Export search command initiated')
        logger.debug('search_ep_smb command: %s', self)  # logs command line

        # Enumerate proxy settings
        http_proxy = os.environ.get('HTTP_PROXY')
        https_proxy = os.environ.get('HTTPS_PROXY')
        proxy_exceptions = os.environ.get('NO_PROXY')

        if http_proxy is not None:
            logger.debug("HTTP proxy: %s" % http_proxy)
        if https_proxy is not None:
            logger.debug("HTTPS proxy: %s" % https_proxy)
        if proxy_exceptions is not None:
            logger.debug("Proxy Exceptions: %s" % proxy_exceptions)

        # Enumerate settings
        app = self._metadata.searchinfo.app
        user = self._metadata.searchinfo.username
        dispatch = self._metadata.searchinfo.dispatch_dir
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        # Use the random number to support running multiple outputs in a single search
        random_number = str(random.randint(10000, 100000))

        try:
            target_config = get_config_from_alias(session_key, cmd_config,
                                                  self.target)
            if target_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        # Get the local client hostname
        client_name = socket.gethostname()
        # Delete any domain from the client hostname string
        if '.' in client_name:
            client_name = client_name[0:client_name.index('.')]

        # Check to see if we have credentials
        valid_settings = []
        for l in list(target_config.keys()):
            if len(target_config[l]) > 0:
                valid_settings.append(l)
        if 'host' in valid_settings:
            # A target has been configured. Check for credentials.
            try:
                if 'credential_username' in valid_settings and 'credential_password' in valid_settings and 'share_name' in valid_settings:
                    domain = target_config[
                        'credential_realm'] if 'credential_realm' in list(
                            target_config.keys()) else target_config['host']

                    try:
                        # Try port 445 first
                        conn = SMBConnection(
                            target_config['credential_username'],
                            target_config['credential_password'],
                            client_name,
                            target_config['host'],
                            domain=domain,
                            use_ntlm_v2=True,
                            sign_options=SMBConnection.SIGN_WHEN_SUPPORTED,
                            is_direct_tcp=True)
                        connected = conn.connect(target_config['host'],
                                                 445,
                                                 timeout=5)

                        if target_config['share_name'] not in (
                                s.name for s in conn.listShares(timeout=10)):
                            exit_error(
                                logger,
                                "Unable to find the specified share name on the server",
                                553952)
                        '''
						p445_error = repr(e445)
						try:
							# Try port 139 if that didn't work
							conn = SMBConnection(target_config['credential_username'], target_config['credential_password'], client_name, 
							target_config['host'], domain=domain, use_ntlm_v2=True,
							sign_options = SMBConnection.SIGN_WHEN_SUPPORTED) 
							connected = conn.connect(target_config['host'], 139, timeout=5)
						except BaseException as e139:
							p139_error = repr(e139)
							raise Exception("Errors connecting to host: \\nPort 139: %s\\nPort 445: %s" % (p139_error, p445_error))

						conn = SMBConnection(target_config['credential_username'], target_config['credential_password'], client_name, 
							target_config['host'], domain=domain, use_ntlm_v2=True,
							sign_options = SMBConnection.SIGN_WHEN_SUPPORTED) 
						connected = conn.connect(target_config['host'], 139)
						shares = 
						share_exists = False
						for i in range(len(shares)):
							if shares[i].name == target_config['share_name']:
								share_exists = True
								break
						'''
                    except BaseException as e:
                        exit_error(
                            logger,
                            "Unable to setup SMB connection: " + repr(e),
                            921982)
                else:
                    exit_error(logger, "Required settings not found", 101926)
            except BaseException as e:
                exit_error(logger,
                           "Error reading the configuration: " + repr(e),
                           230494)
        else:
            exit_error(logger,
                       "Could not find required configuration settings",
                       2823874)

        file_extensions = {
            'raw': '.log',
            'kv': '.log',
            'pipe': '.log',
            'csv': '.csv',
            'tsv': '.tsv',
            'json': '.json'
        }

        if self.outputformat is None:
            self.outputformat = 'csv'
        # Create the default filename
        default_filename = ('export_' + user + '___now__' +
                            file_extensions[self.outputformat]).strip("'")

        folder, filename = event_file.parse_outputfile(self.outputfile,
                                                       default_filename,
                                                       target_config)

        if self.compress is not None:
            logger.debug('Compression: %s', self.compress)
        else:
            try:
                self.compress = target_config.get('compress')
            except:
                self.compress = False

        staging_filename = 'export_everything_staging_' + random_number + '.txt'
        local_output_file = os.path.join(dispatch, staging_filename)
        if self.compress:
            local_output_file = local_output_file + '.gz'

        # Append .gz to the output file if compress=true
        if not self.compress and len(filename) > 3:
            if filename[-3:] == '.gz':
                # We have a .gz extension when compression was not specified. Enable compression.
                self.compress = True
        elif self.compress and len(filename) > 3:
            if filename[-3:] != '.gz':
                filename = filename + '.gz'

        if conn is not None:
            # Use the credential to connect to the SMB server
            try:
                # Check to see if the folder exists
                folder_attrs = conn.getAttributes(target_config['share_name'],
                                                  folder,
                                                  timeout=10)
            except BaseException:
                # Remote directory could not be loaded. It must not exist. Create it.
                # Create the folders required to store the file
                subfolders = ['/'] + folder.strip('/').split('/')
                if '' in subfolders:
                    subfolders.remove('')
                logger.debug("Folders list for dir creation: %s" %
                             str(subfolders))
                current_folder = ''
                folder_depth = len(subfolders) - 1
                for i, subfolder_name in enumerate(subfolders):
                    current_folder = (current_folder + '/' +
                                      subfolder_name).replace('//', '/')
                    logger.debug("Current folder = " + current_folder)
                    try:
                        conn.getAttributes(target_config['share_name'],
                                           current_folder,
                                           timeout=10)
                    except:
                        conn.createDirectory(target_config['share_name'],
                                             current_folder,
                                             timeout=10)
                try:
                    folder_attrs = conn.getAttributes(
                        target_config['share_name'], folder, timeout=10)
                except BaseException as e:
                    exit_error(
                        logger, "Could not load or create remote directory: " +
                        repr(e), 377890)

            # folder_attrs is set either by the initial check or after creating the directory tree
            if folder_attrs is not None:
                if folder_attrs.isReadOnly or not folder_attrs.isDirectory:
                    exit_error(
                        logger,
                        "Could not access the remote directory: " + repr(e),
                        184772)
                else:
                    try:
                        event_counter = 0
                        # Write the output file to disk in the dispatch folder
                        logger.debug(
                            "Writing events to dispatch file. file=\"%s\" format=%s compress=%s fields=%s",
                            local_output_file, self.outputformat,
                            self.compress, self.fields)
                        for event in event_file.write_events_to_file(
                                events, self.fields, local_output_file,
                                self.outputformat, self.compress):
                            yield event
                            event_counter += 1
                    except BaseException as e:
                        exit_error(logger,
                                   "Error writing file to upload: " + repr(e),
                                   296733)

                    # Write the file to the remote location
                    try:
                        with open(local_output_file, 'rb',
                                  buffering=0) as local_file:
                            bytes_uploaded = conn.storeFile(
                                target_config['share_name'],
                                folder + '/' + filename, local_file)
                    except BaseException as e:
                        exit_error(
                            logger,
                            "Error uploading file to SMB server: " + repr(e),
                            109693)

                    if bytes_uploaded > 0:
                        message = "SMB Export Status: Success. File name: %s" % (
                            folder + '/' + filename)
                        eprint(message)
                        logger.info(message)
                    else:
                        exit_error(logger, "Zero bytes uploaded", 771293)
        else:
            exit_error(logger, "Could not connect to server.", 159528)
Example #13
0
class epawss3(ReportingCommand):
    doc = '''
	**Syntax:**
	search | epawss3 target=<target alias> bucket=<bucket> outputfile=<output path/filename> outputformat=[json|raw|kv|csv|tsv|pipe]

	**Description**
	Export Splunk events to AWS S3 (or compatible) over JSON or raw text.
	'''

    # Define Parameters
    target = Option(doc='''
		**Syntax:** **target=***<target alias>*
		**Description:** The name of the AWS target alias provided on the configuration dashboard
		**Default:** The target configured as "Default" within the AWS S3 Setup page (if any)''',
                    require=False)

    bucket = Option(doc='''
		**Syntax:** **bucket=***<bucket name>*
		**Description:** The name of the destination S3 bucket
		**Default:** The bucket name from the AWS S3 Setup page (if any)''',
                    require=False)

    outputfile = Option(doc='''
		**Syntax:** **outputfile=***<output path/filename>*
		**Description:** The path and filename to be written to the S3 bucket
		**Default:** The name of the user plus the timestamp and the output format, e.g. admin_1588000000.log
			json=.json, csv=.csv, tsv=.tsv, pipe=.log, kv=.log, raw=.log''',
                        require=False)

    outputformat = Option(doc='''
		**Syntax:** **outputformat=***[json|raw|kv|csv|tsv|pipe]*
		**Description:** The format written for the output events/search results
		**Default:** *csv*''',
                          require=False)

    fields = Option(doc='''
		**Syntax:** **fields=***"field1, field2, field3"*
		**Description:** Limit the fields to be written to the S3 file
		**Default:** All (Unspecified)''',
                    require=False,
                    validate=validators.List())

    compress = Option(doc='''
		**Syntax:** **compress=***[true|false]*
		**Description:** Option to compress the output file into .gz format before uploading
		**Default:** The setting from the target configuration, or True if .gz is in the filename ''',
                      require=False,
                      validate=validators.Boolean())

    # Validators found @ https://github.com/splunk/splunk-sdk-python/blob/master/splunklib/searchcommands/validators.py

    def __getitem__(self, key):
        return getattr(self, key)

    def map(self, events):
        for e in events:
            yield (e)

    # Define main function
    def reduce(self, events):
        try:
            app_config = cli.getConfStanza('ep_general', 'settings')
            cmd_config = cli.getConfStanzas('ep_aws_s3')
        except BaseException as e:
            raise Exception("Could not read configuration: " + repr(e))

        # Facility info - prepended to log lines
        facility = os.path.basename(__file__)
        facility = os.path.splitext(facility)[0]
        try:
            logger = setup_logger(app_config["log_level"],
                                  'export_everything.log', facility)
        except BaseException as e:
            raise Exception("Could not create logger: " + repr(e))

        logger.info('AWS S3 Export search command initiated')
        logger.debug("Configuration: " + str(cmd_config))
        logger.debug('search_ep_awss3 command: %s', self)  # logs command line

        # Enumerate settings
        app = self._metadata.searchinfo.app
        user = self._metadata.searchinfo.username
        dispatch = self._metadata.searchinfo.dispatch_dir
        session_key = self._metadata.searchinfo.session_key

        if self.target is None and 'target=' in str(self):
            recover_parameters(self)
        # Replace all tokenized parameter strings
        replace_object_tokens(self)

        # Build the configuration
        try:
            aws_config = get_config_from_alias(session_key, cmd_config,
                                               self.target)
            if aws_config is None:
                exit_error(
                    logger,
                    "Unable to find target configuration (%s)." % self.target,
                    100937)
            logger.debug("Target configuration: " + str(aws_config))
        except BaseException as e:
            exit_error(logger,
                       "Error reading target server configuration: " + repr(e),
                       124812)

        if self.bucket is None:
            if 'default_s3_bucket' in list(aws_config.keys()):
                t = aws_config['default_s3_bucket']
                if t is not None and len(t) > 0:
                    self.bucket = t
                else:
                    exit_error(logger, "No bucket specified", 4)
            else:
                exit_error(logger, "No bucket specified", 5)

        file_extensions = {
            'raw': '.log',
            'kv': '.log',
            'pipe': '.log',
            'csv': '.csv',
            'tsv': '.tsv',
            'json': '.json'
        }

        if self.outputformat is None:
            self.outputformat = 'csv'

        if self.outputfile is None:
            # Boto is special. We need repr to give it the encoding it expects to match the hashing.
            self.outputfile = repr('export_' + user + '___now__' +
                                   file_extensions[self.outputformat]).strip(
                                       "'")

        # Replace keywords from output filename
        self.outputfile = replace_keywords(self.outputfile)

        if self.compress is not None:
            logger.debug('Compression: %s', self.compress)
        else:
            try:
                self.compress = str2bool(aws_config['compress'])
            except:
                self.compress = False

        # Use the random number to support running multiple outputs in a single search
        random_number = str(random.randint(10000, 100000))
        staging_filename = 'export_everything_staging_' + random_number + '.txt'
        local_output_file = os.path.join(dispatch, staging_filename)

        # Reconcile the compression setting with the filename's .gz extension
        if not self.compress and len(self.outputfile) > 3:
            # We have a .gz extension when compression was not specified. Enable compression.
            if self.outputfile[-3:] == '.gz':
                self.compress = True
        elif self.compress and len(self.outputfile) > 3:
            if self.outputfile[-3:] != '.gz':
                self.outputfile = self.outputfile + '.gz'

        if self.compress:
            local_output_file = local_output_file + '.gz'

        logger.debug("Staging file: %s" % local_output_file)
        try:
            s3 = get_aws_connection(aws_config)
        except BaseException as e:
            exit_error(logger, "Could not connect to AWS: " + repr(e), 741423)

        event_counter = 0
        # Write the output file to disk in the dispatch folder
        logger.debug(
            "Writing events to file %s in %s format. Compress=%s\n\tfields=%s",
            local_output_file, self.outputformat, self.compress, self.fields)
        for event in event_file.write_events_to_file(events, self.fields,
                                                     local_output_file,
                                                     self.outputformat,
                                                     self.compress):
            yield event
            event_counter += 1

        # Upload file to s3
        try:
            with open(local_output_file, "rb") as f:
                s3.upload_fileobj(f, self.bucket, self.outputfile)
            logger.info(
                "Successfully exported events to s3. app=%s count=%s bucket=%s file=%s user=%s"
                % (app, event_counter, self.bucket, self.outputfile, user))
            os.remove(local_output_file)
            # Release the client references only after cleanup, so the except
            # clause below can still resolve s3.exceptions if anything raises.
            s3 = None
            sts_client = None
        except s3.exceptions.NoSuchBucket as e:
            exit_error(logger, "Error: No such bucket", 123833)
        except BaseException as e:
            exit_error(logger, "Could not upload file to S3: " + repr(e), 9)
Example #14
0
class LdapSearchCommand(GeneratingCommand):
    """ Retrieves results from the specified search in a configured domain and generates events.

    This command must be placed at the beginning of a search pipeline:

        .. code-block:: text

            | ldapsearch domain=splunk.com search="(objectCategory=User)" attrs="distinguishedName"

    """

    search = Option(doc=''' Specifies an RFC 2254 compliant search string.
        ''',
                    require=True)

    attrs = Option(
        doc=
        ''' Specifies a comma separated list of attributes to be returned as fields.
        **Default:** '*', specifying that all attributes should be returned as fields.
        ''',
        default=[ldap3.ALL_ATTRIBUTES],
        validate=validators.List())

    basedn = Option(doc=''' Specifies the starting point for the search.
        Default: The value of basedn as specified in the configuration stanza for domain.
        ''')

    domain = Option(
        doc=
        ''' Specifies the LDAP or Active Directory domain directory to search.
        ''',
        default='default')

    scope = Option(
        doc=''' Specifies the scope of the search to be one of base, one, or sub.
        **Default:** sub.
        ''',
        default='sub',
        validate=validators.Map(base=ldap3.SEARCH_SCOPE_BASE_OBJECT,
                                one=ldap3.SEARCH_SCOPE_SINGLE_LEVEL,
                                sub=ldap3.SEARCH_SCOPE_WHOLE_SUBTREE))

    debug = Option(
        doc=
        ''' True, if the logging_level should be set to DEBUG; otherwise False.
        **Default:** The current value of logging_level.
        ''',
        default=False,
        validate=validators.Boolean())

    decode = Option(
        doc=
        ''' True, if Active Directory formatting rules should be applied to attribute types.
        **Default:** The value of decode as specified in the configuration stanza for domain.
        ''',
        default=True,
        validate=validators.Boolean())

    limit = Option(
        doc=
        ''' Specifies an upper bound on the number of matching entries returned by the search.
        **Default:** 0, specifying that there is no upper bound on the number of entries returned by the search.
        ''',
        default=0,
        validate=validators.Integer(minimum=0))

    def generate(self):
        """
        :return: `None`.

        """
        configuration = app.Configuration(self)

        try:
            with ldap3.Connection(
                    configuration.server,
                    read_only=True,
                    raise_exceptions=True,
                    user=configuration.credentials.username,
                    password=configuration.credentials.password) as connection:

                attribute_names = app.get_normalized_attribute_names(
                    self.attrs, connection, configuration)

                entry_generator = connection.extend.standard.paged_search(
                    search_base=self.basedn,
                    search_filter=self.search,
                    search_scope=self.scope,
                    attributes=self.attrs,
                    paged_size=configuration.paged_size)

                encoder = JSONEncoder(ensure_ascii=False,
                                      separators=(',', ':'))
                time_stamp = time()
                serial_number = 0

                for entry in entry_generator:
                    attributes = app.get_attributes(self, entry)
                    if attributes:
                        dn = entry['dn']
                        yield LdapSearchCommand._record(
                            serial_number, time_stamp, connection.server.host,
                            dn, attributes, attribute_names, encoder)
                        serial_number += 1
                    if self.limit and serial_number == self.limit:
                        break

        except ldap3.LDAPException as error:
            self.error_exit(error,
                            app.get_ldap_error_message(error, configuration))

        return

    @staticmethod
    def _record(serial_number, time_stamp, host, dn, attributes,
                attribute_names, encoder):

        # Base-64 encode binary values (they're stored as str values--byte strings--not unicode values)

        for name, value in attributes.iteritems():
            if isinstance(value, str):
                attributes[name] = b64encode(value)
            elif isinstance(value, list):
                for i in range(len(value)):
                    if isinstance(value[i], str):
                        value[i] = b64encode(value[i])

        raw = encoder.encode(attributes)

        # Formulate record

        if serial_number > 0:
            attributes['_serial'] = serial_number
            attributes['_time'] = time_stamp
            attributes['_raw'] = raw
            attributes['host'] = host
            attributes['dn'] = dn
            return attributes

        record = OrderedDict(
            chain((('_serial', serial_number), ('_time', time_stamp),
                   ('_raw', raw), ('host', host), ('dn', dn)),
                  imap(lambda name: (name, attributes.get(name, '')),
                       attribute_names)))

        return record
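
The _record helper base64-encodes binary attribute values before serializing them, using Python 2 byte-string semantics (iteritems, imap). A hedged Python 3 equivalent of that normalization step might look like the sketch below; the function name is illustrative.

# Hedged Python 3 sketch of the binary-value normalization done in _record above.
from base64 import b64encode

def encode_binary_values(attributes):
    for name, value in attributes.items():
        if isinstance(value, bytes):
            attributes[name] = b64encode(value).decode('ascii')
        elif isinstance(value, list):
            attributes[name] = [
                b64encode(item).decode('ascii') if isinstance(item, bytes) else item
                for item in value
            ]
    return attributes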