    def test_generating_command_as_unit(self):
        simulate_path = get_searchcommand_example("simulate.py")
        self.assertTrue(os.path.isfile(simulate_path))

        # Copy population.csv to the $SPLUNK_HOME/var/run/splunk/ directory
        population_file = os.path.join(os.path.dirname(simulate_path),
                                       "population.csv")
        shutil.copy(population_file, validators.File._var_run_splunk)

        # load the SimulateCommand class from simulate.py
        simulate = imp.load_source('searchcommands_app', simulate_path)

        instream = StringIO()
        outstream = StringIO()
        cli_args = [
            "simulate.py",
            "__GETINFO__",
            "duration=00:00:10",
            "csv=population.csv",
            "rate=1",
            "interval=00:00:01"]
        # Run the process
        dispatch(simulate.SimulateCommand, cli_args, instream, outstream,
                 "__main__")
        expected_info_path = os.path.join(os.path.dirname(__file__), 'data/_expected_results/test_generating_command_in_isolation.getinfo.csv')
        self.assertEqual(io.open(os.path.abspath(expected_info_path), newline='').read(), outstream.getvalue())

        instream = StringIO()
        outstream = StringIO()
        cli_args = [
            "simulate.py",
            "__EXECUTE__",
            "duration=00:00:10",
            "csv=population.csv",
            "rate=1",
            "interval=00:00:01"]
        # Run the process
        dispatch(simulate.SimulateCommand, cli_args, instream, outstream,
                 "__main__")

        rows = outstream.getvalue().split("\r\n")[1:-1]

        found_fields = rows[0].split(",")
        expected_fields = [
            '_time',
            '_serial',
            'text',
            '__mv__time',
            '__mv__serial',
            '__mv_text',
        ]
        self.assertEqual(len(expected_fields), len(found_fields))
        self.assertEqual(expected_fields, found_fields)

        # did we get the field names and at least 2 events?
        self.assertTrue(3 < len(rows))

        return

    def test_helloworld_generating_command_as_unit(self):
        helloworld_path = get_searchcommand_example("generatehello.py")
        self.assertTrue(os.path.isfile(helloworld_path))
        helloworld = imp.load_source('searchcommands_app', helloworld_path)

        instream = StringIO()
        outstream = StringIO()
        cli_args = [
            "generatehello.py",
            "__GETINFO__",
            "count=5",
        ]
        # Run the process
        dispatch(helloworld.GenerateHelloCommand, cli_args, instream, outstream,
                 "__main__")
        expected_info_path = os.path.join(os.path.dirname(__file__), 'data/_expected_results/test_generating_command_in_isolation.getinfo.csv')
        self.assertEqual(io.open(os.path.abspath(expected_info_path), newline='').read(), outstream.getvalue())

        # Overwrite the existing StringIO objects
        instream = StringIO()
        outstream = StringIO()
        cli_args = [
            "generatehello.py",
            "__EXECUTE__",
            "count=5",
        ]
        # Run the process
        dispatch(helloworld.GenerateHelloCommand, cli_args, instream, outstream,
                 "__main__")

        # Trim the blank lines at either end of the list
        rows = outstream.getvalue().split("\r\n")[1:-1]

        found_fields = rows[0].split(",")
        expected_fields = [
            '_time',
            'event_no',
            '_raw',
            '__mv__time',
            '__mv_event_no',
            '__mv__raw',
        ]

        self.assertEqual(len(expected_fields), len(found_fields))
        self.assertEqual(expected_fields, found_fields)

        # Trim the field names
        events = rows[1:]
        self.assertEqual(5, len(events))

        for i in range(len(events)):
            event = events[i].split(",")
            self.assertEqual(i + 1, int(event[1]))
            self.assertEqual(i + 1, int(event[2][-1]))
        return
Example 3
    outputfield = Option(
        doc='''
        **Syntax:** **outputfield=***<fieldname>*
        **Description:** Name of the field that will hold the found time''',
        require=True, validate=validators.Fieldname())

    def stream(self, records):
        self.logger.debug('StrptimeFindCommand: %s', self)  # logs command line
        for record in records:
            datetime_str_orig = record[self.fieldname]
            valid_strptime_string = record[self.pattern]
            datetime_object = None
            limit = len(valid_strptime_string)
            # Slide a shrinking window across the string until some substring
            # parses with the strptime pattern held in the pattern field.
            while len(datetime_str_orig) > limit:
                datetime_str = datetime_str_orig
                while len(datetime_str) > limit:
                    try:
                        datetime_object = datetime.strptime(datetime_str, valid_strptime_string)
                        break
                    except ValueError:
                        datetime_str = datetime_str[:-1]
                datetime_str_orig = datetime_str_orig[1:]
            if datetime_object:
                record[self.outputfield] = time.mktime(datetime_object.timetuple())
            yield record

dispatch(StrptimeFindCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 4
@Configuration()
class Neo4jCommand(GeneratingCommand):
    query = Option(require=True)
    host = Option(require=True)
    username = Option(require=False, default="")
    password = Option(require=False, default="")
    scheme = Option(require=False, default="bolt")

    def __get_data(self, query, host, username, password, scheme):
        url = scheme + "://" + host
        # set up authentication parameters
        auth = None
        if username != "" and password != "":
            auth = basic_auth(username, password)
        driver = GraphDatabase.driver(url, auth=auth)
        session = driver.session()
        results = session.run(query, parameters={})
        for record in results:
            yield (record)

    def generate(self):
        results = self.__get_data(self.query, self.host, self.username,
                                  self.password, self.scheme)

        fields_extractor = FieldsExtractor()
        return fields_extractor.extract(results)


dispatch(Neo4jCommand, module_name=__name__)
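
For reference, a minimal standalone sketch of the neo4j-driver 1.x calls the command above relies on (GraphDatabase.driver, basic_auth, session.run); the bolt URL, credentials, and Cypher query here are placeholders, not values from the example.

# Sketch, assuming neo4j-driver 1.x; URL, credentials and query are placeholders.
from neo4j.v1 import GraphDatabase, basic_auth

driver = GraphDatabase.driver("bolt://localhost:7687",
                              auth=basic_auth("neo4j", "secret"))
session = driver.session()
for record in session.run("MATCH (n) RETURN n.name AS name LIMIT 5"):
    print(record["name"])  # each record maps returned column names to values
session.close()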
Example 5
                            event_id = hashlib.md5(incident[0]['incident_id'] + now).hexdigest()
                            event = 'time="%s" severity=INFO origin="ModifyIncidentsCommand" event_id="%s" user="******" action="change" incident_id="%s" %s="%s" previous_%s="%s"' % (now, event_id, user, incident[0]['incident_id'], key, attrs[key], key, incident[0][key])
                            
                            input.submit(event, hostname = socket.gethostname(), sourcetype = 'incident_change', source = 'modifyincidents.py', index = self.config['index'])

                            incident[0][key] = attrs[key]

                    if len(changed_keys) > 0:
                        uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents/' + incident[0]['_key']
                        del incident[0]['_key']
                        contentsStr = json.dumps(incident[0])
                        serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey, jsonargs=contentsStr)

                    if self.comment:
                        self.comment = self.comment.replace('\n', '<br />').replace('\r', '')
                        event_id = hashlib.md5(incident[0]['incident_id'] + now).hexdigest()
                        event = 'time="%s" severity=INFO origin="ModifyIncidentsCommand" event_id="%s" user="******" action="comment" incident_id="%s" comment="%s"' % (now, event_id, user, incident[0]['incident_id'], self.comment)
                        event = event.encode('utf8')
                        input.submit(event, hostname = socket.gethostname(), sourcetype = 'incident_change', source = 'modifyincidents.py', index = self.config['index'])

                else:                        
                    self.logger.warn("No attributes to modify found, aborting.")

            else:
                self.logger.warn("No incident_id field found in event, aborting.")  

            yield record
       

dispatch(ModifyIncidentsCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 6
    """

    Windd = Option(doc=''' wind direction field (degrees) ''', default='Windd')

    field = Option(doc=''' field to put value in ''', default='Wind_dir')

    def stream(self, events):
        """

        :param events:
        :return:
        """
        dirs = [
            "", "N", "NNE", "NE", "ENE", "E", "ESE", "SE", "SSE", "S", "SSW",
            "SW", "WSW", "W", "WNW", "NW", "NNW", "N"
        ]
        for event in events:
            self.logger.debug("start")
            try:
                wd = float(event[self.Windd])

                # map degrees onto 16 compass points of 22.5 degrees each
                event[self.field] = dirs[int(1 + abs(round(wd / 22.5, 0)))]
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(WindDir, sys.argv, sys.stdin, sys.stdout, __name__)
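
A quick standalone check of the degree-to-compass arithmetic used above: each of the 16 compass points spans 22.5 degrees, and the leading "" entry plus the trailing duplicate "N" absorb rounding at both ends of the scale.

dirs = ["", "N", "NNE", "NE", "ENE", "E", "ESE", "SE", "SSE", "S", "SSW",
        "SW", "WSW", "W", "WNW", "NW", "NNW", "N"]
for wd in (0.0, 95.0, 275.0, 350.0):
    print(wd, dirs[int(1 + abs(round(wd / 22.5, 0)))])
# 0.0 N, 95.0 E, 275.0 W, 350.0 N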
from splunklib.searchcommands import dispatch, StreamingCommand, Configuration, Option, validators
import sys, json
from splunklib.client import connect
from alert_collection import AlertCollection

@Configuration()
class DeleteAlertsCommand(StreamingCommand):

    key = Option(
        doc='''
        **Syntax:** **key=***<field>*
        **Description:** The internal key of the alert''',
        require=True, validate=validators.Fieldname())

    alerts = None

    def stream(self, records):
        self.logger.info('DeleteAlertsCommand: %s', self)  # logs command line
        if not self.alerts:
            self.alerts = AlertCollection(self._metadata.searchinfo.session_key)

        for record in records:
            if self.key in record:
                self.alerts.delete(record[self.key], logger=self.logger)
            else:
                self.logger.error('DeleteAlertsCommand: no field %s in record', self.key)
            yield record

dispatch(DeleteAlertsCommand, sys.argv, sys.stdin, sys.stdout, __name__)

Example 8
            pattern = "[^_].*" + pattern[2:]
        pattern = "(?i)^" + pattern + "$"
        return pattern

    #Streaming command to work with each record
    def stream(self, records):
        self.logger.debug('StreamFilterWildcardCommand: %s',
                          self)  # logs command line
        for record in records:
            values = ""
            pattern = self.pattern
            if pattern not in record:
                self.logger.warn(
                    "StreamFilterWildcardCommand: pattern field is %s but cannot find this field",
                    pattern)
                sys.exit(-1)
            if isinstance(record[pattern], list):
                for aPattern in record[pattern]:
                    pattern = re.compile(self.changeToWildcard(aPattern))
                    values = values + self.thefilter(record, pattern)
            else:
                pattern = re.compile(self.changeToWildcard(record[pattern]))
                values = values + self.thefilter(record, pattern)

            record[self.fieldname] = values
            yield record


dispatch(StreamFilterWildcardCommand, sys.argv, sys.stdin, sys.stdout,
         __name__)
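
Illustrative only: the visible tail of changeToWildcard anchors the pattern and makes it case-insensitive; assuming the elided part maps '*' to '.*', a field value like 'fail*' compiles roughly as below.

import re

wildcard = "fail*"
regex = re.compile("(?i)^" + wildcard.replace("*", ".*") + "$")
print(bool(regex.match("FAILED_LOGIN")))  # True
print(bool(regex.match("ok")))            # False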
Example 9
            "NE",
            "ENE",
            "E",
            "ESE",
            "SE",
            "SSE",
            "S",
            "SSW",
            "SW",
            "WSW",
            "W",
            "WNW",
            "NW",
            "NNW",
            "N",
        ]
        for event in events:
            self.logger.debug("start")
            try:
                wd = float(event[self.Windd])

                # map degrees onto 16 compass points of 22.5 degrees each
                event[self.field] = dirs[int(1 + abs(round(wd / 22.5, 0)))]
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(WindDir, sys.argv, sys.stdin, sys.stdout, __name__)
Example 10
    ##Description

    %(description)

    """

    Temp = Option(doc=""" Temp Field """, default="Temp")

    field = Option(doc=""" field to put value in """, default="rankine")

    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:
            self.logger.debug("start")
            try:
                temp = float(event[self.Temp])

                # Fahrenheit to Rankine: R = F + 459.67
                event[self.field] = round(temp + 459.67, 2)
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(Rankine, sys.argv, sys.stdin, sys.stdout, __name__)
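
Sanity check for the conversion above: degrees Rankine are Fahrenheit degrees measured from absolute zero, so R = F + 459.67.

for f in (32.0, 212.0):
    print(f, round(f + 459.67, 2))
# 32.0 -> 491.67 (freezing point of water), 212.0 -> 671.67 (boiling point)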
Example 11
		X = X.toarray()
		y_pred = None
		mah = None

		clf = classifiers.get(classifier)
		if clf:
			try:
				clf.fit(X)
				y = clf.decision_function(X).ravel()
				threshold = stats.scoreatpercentile(y, 100 * fraction)
				y_pred = y > threshold
				if classifier == 'covariance_estimator' and args['showmah']:
					mah = clf.mahalanobis(X)
			except ValueError:
				y_pred = np.zeros((X.shape[0]))

			for i, y in enumerate(y_pred):
				if y:
					record = records[i]
					if mah is not None:
						record['mahalanobis'] = mah[i].item()
					channel.send(record)
		else:
			channel.send({ "error": "Incorrect classifier specified %s" % classifier })
"""

	def __dir__(self):
		return ['threshold','kernel','degree','gamma','coef0','support_fraction','showmah','classifier']

dispatch(Outliers, sys.argv, sys.stdin, sys.stdout, __name__)
Example 12
                            current[relative_distance] = vincenty(last_pos, current_pos, miles=bool(self.miles))
                    position_tracker[current[self.group_by]] = current_pos
                yield current
        else:
            last_pos = None
            for event in events:
                current = event
                if not (current[latitude] or current[longitude]):
                    current[relative_distance] = 0.0
                    self.logger.debug(
                        "[%s] - Using distance=0 for private IPs or unknown coordinates. Exclude if undesired." % str(
                            self.metadata.searchinfo.sid))
                else:
                    current_pos = (float(current[latitude]), float(current[longitude]))
                    if last_pos is None:
                        current[relative_distance] = 0.0
                        self.logger.debug("[%s] - Initializing the first location with distance=0" % str(
                            self.metadata.searchinfo.sid))
                    else:
                        if use_haversine:
                            current[relative_distance] = haversine(last_pos, current_pos, miles=bool(self.miles))
                        else:
                            current[relative_distance] = vincenty(last_pos, current_pos, miles=bool(self.miles))
                    last_pos = current_pos
                self.logger.debug(current)
                yield current
            self.logger.info("[%s] - Completed successfully." % str(self.metadata.searchinfo.sid))


dispatch(GeoDistanceCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 13
import logging, os, sys, splunk

from splunklib.searchcommands import \
    dispatch, StreamingCommand, Configuration, Option, validators


@Configuration()
class SearchTableCommand(StreamingCommand):
    pattern = Option(
        doc='''
        **Syntax:** **pattern=***<regular-expression>*
        **Description:** Regular expression pattern to match''',
        require=False, validate=validators.RegularExpression())

    def stream(self, records):
        #pydevd.settrace()
        self.logger.setLevel(logging.DEBUG)
        self.logger.debug('SearchTableCommand: %s' % self)  # logs command line
        for record in records:
            found = False
            for field in record:
                matches = len(list(self.pattern.finditer(str(record[field]))))
                if matches > 0:
                    found = True
            if found:
                yield record
        self.logger.debug('SearchTableCommand: Done') 

dispatch(SearchTableCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 14
				#print 'Row:', curr_row
				curr_cell = -1
				_raw = ""
				line = {}
				while curr_cell < num_cells:
					if curr_row > 0:
						curr_cell += 1
						# Cell Types: 0=Empty, 1=Text, 2=Number, 3=Date, 4=Boolean, 5=Error, 6=Blank
						cell_type = worksheet.cell_type(curr_row, curr_cell)
						cell_value = worksheet.cell_value(curr_row, curr_cell)
						head_value = worksheet.cell_value(0, curr_cell)
						#print '	', cell_type, ':', cell_value
						_raw = _raw + '"' + str(head_value) + '"="' + str(cell_value) +'" '
						line[str(head_value)] = str(cell_value)
					else:
						curr_cell += 1
				if curr_row > 1:
					line["_time"] = time.time()
					line["line_number"] = i
					line["_raw"] = _raw
					line["worksheet_name"] = worksheet_name
					yield line
					i=i+1
					
			
dispatch(WGetxlsCommand, sys.argv, sys.stdin, sys.stdout, __name__)




Example 15
                        asset['_raw'] = util.tojson(asset)
                        yield asset

                else:
                    try:
                        # On a non-200 status code, surface the error in the Splunk UI
                        record = util.dictexpand(response)
                        record['url'] = url
                        record['_raw'] = util.tojson(response)
                    except Exception as e:
                        record = dict()
                        record['url'] = url
                        record['error'] = e
                        record['_raw'] = util.tojson(response)
                    yield record

        else:
            try:
                # On a non-200 status code, surface the error in the Splunk UI
                record = util.dictexpand(response)
                record['url'] = url
                record['_raw'] = util.tojson(response)
            except Exception as e:
                record = dict()
                record['url'] = url
                record['error'] = e
                record['_raw'] = util.tojson(response)
            yield record

dispatch(getUserCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 16
  '''
    Parse auth into python dict with correct method
    @auth string: Comma separated auth params -> method,user,pass
    @return object/bool
  '''
  def parseAuth(self, auth):
    # Password could use commas, so split at most 2 times from the left
    auth = auth.split(',', 2)

    # Use correct auth method
    if auth[0].lower() == 'basic':
      return (auth[1].strip(), auth[2].strip())
    elif auth[0].lower() == 'digest':
      return HTTPDigestAuth(auth[1].strip(), auth[2].strip())

    # Return false in case of no valid method
    return False
    
  '''
    Convert headers string into dict
    @headers string: Headers as json string
    @return dict
  '''
  def parseHeaders(self, headers):
    # Replace single quotes with double quotes for valid json
    return json.loads(
      headers.replace('\'', '"')
    )

dispatch(curlCommand, sys.argv, sys.stdin, sys.stdout, __name__)
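
A standalone illustration of the two parsers above: split(',', 2) keeps commas inside the password intact, and parseHeaders accepts single-quoted pseudo-JSON by rewriting the quotes.

import json

auth = "basic,admin,pa,ss,word".split(',', 2)
print(auth)  # ['basic', 'admin', 'pa,ss,word']

headers = json.loads("{'Accept': 'application/json'}".replace('\'', '"'))
print(headers)  # {'Accept': 'application/json'}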
Example 17
    """
    SLP = Option(
        doc=''' sea-level pressure field (inches of mercury) ''', default='SLP')

    field = Option(
        doc=''' field to put value in ''',
        default='Pressure_mmHg')


    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:
            self.logger.debug("start")
            try:
                p = float(event[self.SLP])

                # inches of mercury to millimetres of mercury (1 in = 25.4 mm)
                event[self.field] = round(25.4 * p, 2)
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(PmmHg, sys.argv, sys.stdin, sys.stdout, __name__)
import sys, time
from splunklib.searchcommands import \
    dispatch, GeneratingCommand, Configuration, Option, validators


@Configuration()
class GenerateHelloCommand(GeneratingCommand):
    count = Option(require=True, validate=validators.Integer())

    def generate(self):
        for i in range(1, self.count + 1):
            text = 'Hello World %d' % i
            yield {'_time': time.time(), 'event_no': i, '_raw': text}


dispatch(GenerateHelloCommand, sys.argv, sys.stdin, sys.stdout, __name__)
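
A minimal sketch of running this command outside Splunk, mirroring the unit test at the top of this page (the old v1 search-command protocol with the __EXECUTE__ marker; newer SDK versions use the chunked protocol instead):

from StringIO import StringIO  # Python 2, as in the surrounding examples

out = StringIO()
dispatch(GenerateHelloCommand,
         ['generatehello.py', '__EXECUTE__', 'count=2'],
         StringIO(), out, '__main__')
print(out.getvalue())  # CSV: a header row, then one row per generated event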
Example 19
        username = conf['user']
        password = conf['password']
        url = conf['url']
        value_replacements = conf['value_replacements']
        daysAgo = self.daysAgo
        daysBy = self.daysBy
        filters = self.filters
        table = self.table
        limit = self.limit
        bfilter = {}
        filters = filters.split(',') if filters else []
        exuded = []
        for x in filters:
            k, v = x.split('=')
            if k in bfilter:
                bfilter[k].append(v)
            else:
                bfilter[k] = []
                bfilter[k].append(v)
        snownow = snow(url, username, password)
        snownow.replacementsdict(value_replacements)
        for k, v in bfilter.iteritems():
            exuded.append(snownow.filterbuilder(k, v))
        url = snownow.reqencode(exuded, table=table, timeby=daysBy, days=daysAgo)
        for record in snownow.getrecords(url, limit):
            record = snownow.updaterecord(record, sourcetype='snow', lookup=True)
            record['_raw'] = util.tojson(record)
            yield record

dispatch(snowNowCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 20
                                   
                                   
                        record['investigation_results'] = investigation_results

                else:
                    record['investigation_results'] = "no investigations for this story found"

            yield {

            '_time': time.time(),
            'sourcetype': "_json",
            'story': record['story'],
            'executed_by': record['executed_by'],
            'detection_result_count': record['detection_result_count'],
            'detection_search_name': record['detection_search_name'],
            'first_detection_time': record['first_detection_time'],
            'last_detection_time': record['last_detection_time'],
            'support_search_name': record['support_search_name'],
            'entities': record['entities'],
            'mappings': record['mappings'],
            'detection_results' : record['detection_results'],
            'investigation_results' : record['investigation_results'],
            'investigation_search_failed' : self.search_failed
                  }

        self.logger.info("investigate.pym - ----------Finishing Investigation -------")


if __name__ == "__main__":
    dispatch(Investigate, sys.argv, sys.stdin, sys.stdout, __name__)
    fields = Option(
        doc='''
        **Syntax:** **fields=***<fieldname>*
        **Description:** Comma separated list of fields in results''',
        require=False)

    def stream(self, records):
        # retrieve system information regarding search
        searchinfo = self.metadata.searchinfo
        app_conf = AppConf(searchinfo.splunkd_uri, searchinfo.session_key)
        password = app_conf.get_password()

        # loads custom config
        config = app_conf.get_config('customcommand')

        # create outbound JSON message body
        fields = self.fields.split(',') if self.fields else []

        for record in records:
            record['a_fields'] = '%s' % fields
            record['a_password'] = password
            record['a_combine'] = "%s%s" % (record['sourcetype'], record['source'])
            record['a_config'] = config
            record['a_command_metadata'] = '%s' % self.metadata

            yield record

dispatch(customCommand, sys.argv, sys.stdin, sys.stdout, __name__)


Example 22
class snowReportCommand(GeneratingCommand):

    report = Option(require=True)
    env = Option(require=False)

    def generate(self):
        searchinfo = self.metadata.searchinfo
        self.logger.debug(' executed by username="******" %s', searchinfo.username,
                          ' '.join(searchinfo.args))
        app = AppConf(searchinfo.splunkd_uri, searchinfo.session_key)
        env = self.env.lower() if self.env else 'production'
        conf = app.get_config('getsnow')[env]
        snowreport = snow(conf['url'], conf['user'], conf['password'])
        url = snowreport.reqencode('rep_title={}'.format(self.report),
                                   table='report_home_details')
        for report in snowreport.getrecords(url):
            fields_list = report['rep_field_list'].split(',')
            fields_list.append('sys_created_on')
            url = snowreport.reqencode(urllib.quote_plus(report['rep_filter']),
                                       table=report['rep_table'],
                                       sysparm_fields=fields_list)
            for record in snowreport.getrecords(url):
                record = snowreport.updaterecord(record,
                                                 sourcetype='snow:report')
                record['_raw'] = json.dumps(record)
                record = dictexpand(record)
                yield record


dispatch(snowReportCommand, sys.argv, sys.stdin, sys.stdout, __name__)
        self.logger.debug('decodeCommand: %s' % self)  # logs command line

        if type == 'urlsafe_base64':
            decodeMethod = urlsafe_b64decode
        else:
            decodeMethod = b64decode

        for record in records:

            for fieldname in self.fieldnames:

                try:
                    decodeStr = record[fieldname]

                    if self.autofix:
                        # Fix stray characters to the correct Base64 '=' padding symbol
                        decodeStr = sub(r'[^0-9a-zA-Z+/]','=', record[fieldname])

                    record[fieldname] = decodeMethod(decodeStr)

                except Exception, e:
                    record[fieldname] = "[Error] Can't decode: " + str(e)

            yield record


dispatch(DecodeCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 24
                        logger.info(
                            "No events found, please increase the search timespan to have more search results."
                        )
                    tot_time_end = time.time()
                    logger.debug(
                        "Total execution time => {}".format(tot_time_end -
                                                            tot_time_start))
                except Exception:
                    logger.info(
                        "Exception occurred while adding the noise and RIOT status to the events, Error: {}"
                        .format(traceback.format_exc()))
                    self.write_error(
                        "Exception occurred while adding the noise and RIOT status of "
                        "the IP addresses to events. See greynoise_main.log for more details."
                    )

        else:
            logger.error(
                "Please specify exactly one of the parameters ip and ip_field with some value."
            )
            self.write_error(
                "Please specify exactly one of the parameters ip and ip_field with some value."
            )

    def __init__(self):
        """Initialize custom command class."""
        super(GNQuickCommand, self).__init__()


dispatch(GNQuickCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 25
        doc=''' sea-level pressure field (inches of mercury) ''', default='SLP')




    field = Option(
        doc=''' field to put value in ''',
        default='kilopascals')


    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:
            self.logger.debug("start")
            try:
                p = float(event[self.SLP])

                # inches of mercury to kilopascals (1 inHg = 3.38639 kPa)
                event[self.field] = round(33.8639 * (p / 10), 2)
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(Kilopascals, sys.argv, sys.stdin, sys.stdout, __name__)
Example 26
        """Generate events to Splunk"""

        # Get config
        config = self._get_search_config()

        # Create Elasticsearch client
        esclient = Elasticsearch(config[KEY_CONFIG_EADDR],
                                 verify_certs=config[KEY_CONFIG_VERIFY_CERTS],
                                 use_ssl=config[KEY_CONFIG_USE_SSL])

        if self.action == ACTION_SEARCH:
            return self._search(esclient, config)
        if self.action == ACTION_INDICES_LIST:
            return self._list_indices(esclient)
        if self.action == ACTION_CLUSTER_HEALTH:
            return self._cluster_health(esclient)


def _flattern(key, data):
    result = {}
    for inkey in data:
        if isinstance(data[inkey], dict):
            for inkey2, value in _flattern(inkey, data[inkey]).items():
                result[key + "." + inkey2] = value
        else:
            result[key + "." + inkey] = data[inkey]
    return result


dispatch(ElasticSplunk, sys.argv, sys.stdin, sys.stdout, __name__)
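
What _flattern produces for a nested Elasticsearch hit, for example:

doc = {"lat": 1.0, "loc": {"x": 2, "y": 3}}
print(_flattern("geo", doc))
# {'geo.lat': 1.0, 'geo.loc.x': 2, 'geo.loc.y': 3}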
Example 27
                            elif six.text_type(record['detected']) == "0":
                                tech['color'] = YELLOW_DK
                            elif six.text_type(record['detected']) == "1":
                                tech['color'] = BLUE_1
                            elif six.text_type(record['detected']) == "2":
                                tech['color'] = BLUE_2
                            elif six.text_type(record['detected']) == "3":
                                tech['color'] = BLUE_3
                            elif six.text_type(record['detected']) == "4":
                                tech['color'] = BLUE_4
            else:
                record['_raw'] = "Error no field with that name exists {}".format(self.atkfield)
                raise Exception("Error no field with that name exists {}".format(self.atkfield))
            yield record
        # if the user passes a name arg then create the new kvstore entry for that new layer
        # will want to update this code in the future to handle error cases better, and user feedback

        if self.layername is not None:
            default_layer['_key'] = self.layername
            status = self.saveCustomLayer(default_layer)
            self.logger.debug('custom layer file requested: {}'.format(status))
        # post updated default layer if all was successful & we didn't get a name argument
        else:    
            r, c = splunk.rest.simpleRequest(DEFAULT_URI, jsonargs=json.dumps(default_layer), sessionKey=self.metadata.searchinfo.session_key, rawResult=True)    
            if r.status == 200:
                self.logger.debug('updated default layer successfully: {}'.format(json.loads(c)))
            else:
                self.logger.debug('error updating default layer: {}'.format(json.loads(c)))

dispatch(genatklayerCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 28
    query = Option(name="query", require=True)
    field_names = ['digsig_publisher', 'digsig_result', 'digsig_sign_time', 'host_count', 'is_executable_image',
                   'last_seen', 'original_filename', 'os_type', 'product_name', 'product_version', 'md5']

    def prepare(self):
        configuration_dict = splunk.clilib.cli_common.getConfStanza('carbonblack', 'cbserver')

        self.cb_server = configuration_dict['cburl']
        token = configuration_dict['cbapikey']

        self.cb = CbApi(self.cb_server, token=token, ssl_verify=False)

    def generate(self):
        found = False
        for bindata in self.cb.binary_search_iter(self.query):
            self.logger.info("yielding binary %s" % bindata["md5"])
            rawdata = dict((field_name, bindata.get(field_name, "")) for field_name in self.field_names)
            found = True
            yield {'sourcetype': 'bit9:carbonblack:json', '_time': time.time(), 'source': self.cb_server,
                   '_raw': rawdata}
        if not found:
            yield {'sourcetype': 'bit9:carbonblack:json', '_time': time.time(), 'source': self.cb_server,
                   '_raw': '{"Error":"MD5 not found"}'}


if __name__ == '__main__':
    dispatch(BinarySearchCommand, sys.argv, sys.stdin, sys.stdout, __name__)
#!/usr/bin/env python

import requests
import json
import sys, time
from splunklib.searchcommands import \
    dispatch, GeneratingCommand, Configuration, Option, validators

@Configuration()
class PasGetUserInfoCommand(GeneratingCommand):
	user = Option(require=True)

	def generate(self):
		url = 'http://localhost:5000/user_list/api/v1.0/users/' + self.user
		data = requests.get(url).json()
		if 'user' in data:
			# Known user.
			row = {}
			for k, v in data['user'].iteritems():
				row[str(k)] = str(v)
			yield row
		else:
			# Unknown user. Return no data.
			pass
dispatch(PasGetUserInfoCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 30
            for group in record.keys():
                if group not in total:
                    total[group] = {}

                for field in record[group].keys():
                    if field not in total[group]:
                        total[group][field] = defaultdict(int)

                    for value in record[group][field].keys():
                        # accumulate per-value counts across all records
                        total[group][field][value] += record[group][field][value]

        for group in total.keys():
            for field in total[group].keys():
                # Shannon entropy over the value frequencies: H = -sum(p * log2 p)
                entropy = 0.0
                for value in total[group][field].keys():
                    if value == 'Sum':
                        continue
                    frequency = total[group][field][value] / float(total[group][field]["Sum"])
                    entropy += frequency * math.log(frequency, 2)
                total[group][field] = -entropy

        yield total


dispatch(EntropyCommand, sys.argv, sys.stdin, sys.stdout, __name__)
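
The value computed above is Shannon entropy over the accumulated value frequencies, H = -sum(p * log2(p)); a quick standalone check with two equally likely values:

import math

counts = {"a": 2, "b": 2, "Sum": 4}
h = -sum((c / float(counts["Sum"])) * math.log(c / float(counts["Sum"]), 2)
         for k, c in counts.items() if k != "Sum")
print(h)  # 1.0 bit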
Example 31
        doc=''' wind speed field (mph) ''', default='Winds')




    field = Option(
        doc=''' field to put value in ''',
        default='windfps')


    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:
            self.logger.debug("start")
            try:
                w = float(event[self.Winds])

                # miles per hour to feet per second (1 mph = 1.46667 ft/s)
                event[self.field] = round(1.46667 * w, 2)
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(Wfps, sys.argv, sys.stdin, sys.stdout, __name__)
Example 32
        'properties': properties,
        'timestamp': timestamp,
        'eventType': eventType,
    }

    payload.append(eventDict)

    event_payload = json.dumps(eventDict)
    #self.logger.error(event_payload)


def send_payload(payload, target_url, token):
    body = BytesIO()
    with gzip.GzipFile(fileobj=body, mode="w") as fd:
        fd.write(json.dumps(payload))
    body.seek(0)

    resp = requests.post(
        target_url,
        headers={
            "X-SF-TOKEN": token,
            "Content-Encoding": "gzip",
            "Content-Type": "application/json"
        },
        data=body.read(),
    )
    return resp


dispatch(ToSFXCommand, sys.argv, sys.stdin, sys.stdout, __name__)
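
A hedged usage sketch for send_payload; the URL and token below are placeholders rather than values from the example (the X-SF-TOKEN header suggests a SignalFx-style ingest endpoint).

payload = [{'eventType': 'test', 'timestamp': 1234567890, 'properties': {}}]
resp = send_payload(payload,
                    target_url='https://ingest.example.com/v2/event',  # placeholder
                    token='MY-TOKEN')                                  # placeholder
print(resp.status_code)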
Example 33
			target = model['target']
			fields = model['fields']
			if model.get('text'):
				if model['text'] == 'lsi':
					textmodel = LsiModel.load(args['model'].replace(".pkl",".%s" % model['text']))
				elif model['text'] == 'tfidf':
					textmodel = TfidfModel.load(args['model'].replace(".pkl",".%s" % model['text']))
				else:
					textmodel = model['text']
		except Exception as e:
			print >> sys.stderr, "ERROR", e
			channel.send({ 'error': "Couldn't find model %s" % args['model']})
		else:
			X, y_labels, textmodel = process_records(records, fields, target, textmodel=textmodel)

			print >> sys.stderr, X.shape
			y = est.predict(X)
			y_labels = encoder.inverse_transform(y)

			for i, record in enumerate(records):
				record['%s_predicted' % target] = y_labels.item(i)
				channel.send(record)

"""

    def __dir__(self):
        return ["model"]


dispatch(MCPredict, sys.argv, sys.stdin, sys.stdout, __name__)
    if self.limit is not None:
      url_params['limit'] = self.limit

    if self.offset is not None:
      url_params['offset'] = self.offset

    """Returns response for API request."""
    # Unsigned URL
    encoded_params = ''
    if url_params:
      encoded_params = urllib.urlencode(url_params)
    url = 'http://api.yelp.com/v2/search?%s' % (encoded_params)

    # Sign the URL
    consumer = oauth2.Consumer(consumer_key, consumer_secret)
    oauth_request = oauth2.Request('GET', url, {})
    oauth_request.update(
      {'oauth_nonce': oauth2.generate_nonce(),
      'oauth_timestamp': oauth2.generate_timestamp(),
      'oauth_token': token,
      'oauth_consumer_key': consumer_key})

    token = oauth2.Token(token, token_secret)
    oauth_request.sign_request(oauth2.SignatureMethod_HMAC_SHA1(), 
      consumer, token)
    signed_url = oauth_request.to_url()
    return signed_url

dispatch(YelpCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 35
    Count the number of words in the `text` of each tweet in tweets.csv and store the result in `word_count`.

    .. code-block::
        | inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text

    """
    fieldname = Option(
        doc='''
        **Syntax:** **fieldname=***<fieldname>*
        **Description:** Name of the field that will hold the match count''',
        require=True, validate=validators.Fieldname())

    pattern = Option(
        doc='''
        **Syntax:** **pattern=***<regular-expression>*
        **Description:** Regular expression pattern to match''',
        require=True, validate=validators.RegularExpression())

    def stream(self, records):
        self.logger.debug('CountMatchesCommand: %s', self)  # logs command line
        pattern = self.pattern
        for record in records:
            count = 0
            for fieldname in self.fieldnames:
                matches = pattern.findall(six.text_type(record[fieldname].decode("utf-8")))
                count += len(matches)
            record[self.fieldname] = count
            yield record

dispatch(CountMatchesCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 36
        CONF_FILE = make_splunkhome_path(['etc', 'apps', 'SA-rsearch', 'bin', 'rsearch.config'])
        Config = ConfigParser.ConfigParser()
        parsed_conf_files = Config.read(CONF_FILE)
        if CONF_FILE not in parsed_conf_files:
           logger_admin.error('Could not read config file: %s' % (CONF_FILE))
        USER = Config.get('rsearch', 'USER')
        PASSWORD = Config.get('rsearch', 'PASS')
        HOST = Config.get('rsearch', 'HOST')
        PORT = Config.get('rsearch', 'PORT')
        resultstoreturn = []

        try:
            searchquery = """
            | inputlookup employeeinfo.csv | search user={}
            """.format(user)
            kwargs_oneshot = {'count': 0}
            service = client.connect(host=HOST, port=PORT, username=USER, password=PASSWORD)
            searchresults = service.jobs.oneshot(searchquery, **kwargs_oneshot)
            reader = results.ResultsReader(searchresults)
            if reader:
                for item in reader:
                    resultstoreturn.append(item)
        except Exception:
            logger_admin.exception('Error executing search.')

        return resultstoreturn

    
if __name__ == "__main__":
    dispatch(rinputlookupCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 37
        default='new_wind_chill')

    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:
            self.logger.debug("start")
            try:
                temp = float(event[self.Temp])
                RH = float(event[self.Relh])
                wind = float(event[self.Winds])

                # pre-2001 wind chill index
                old_chil = round(0.0817 * (3.71 * pow(wind, 0.5) + 5.81 - 0.25 * wind) * (temp - 91.4) + 91.4, 2)
                # 2001 NWS wind chill formula
                new_chil = round(35.74 + 0.6215 * temp - 35.75 * pow(wind, 0.16) + 0.4275 * temp * pow(wind, 0.16), 2)

                event[self.field] = round(old_chil, 2)
                event[self.field2] = round(new_chil, 2)
            except Exception:
                event[self.field] = "N/A"
                event[self.field2] = "N/A"
            yield event


dispatch(WindChill, sys.argv, sys.stdin, sys.stdout, __name__)
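
Spot-checking the two formulas above at 30 degrees F with a 20 mph wind (the 2001 NWS tables give about 17 F for these inputs):

temp, wind = 30.0, 20.0
old_chil = round(0.0817 * (3.71 * pow(wind, 0.5) + 5.81 - 0.25 * wind) * (temp - 91.4) + 91.4, 2)
new_chil = round(35.74 + 0.6215 * temp - 35.75 * pow(wind, 0.16) + 0.4275 * temp * pow(wind, 0.16), 2)
print(old_chil, new_chil)  # 4.11 17.36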
Example 38
        doc=''' sea-level pressure field (inches of mercury) ''', default='SLP')




    field = Option(
        doc=''' field to put value in ''',
        default='psi')


    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:
            self.logger.debug("start")
            try:
                p = float(event[self.SLP])

                # inches of mercury to pounds per square inch (1 inHg = 0.491154 psi)
                event[self.field] = round(0.491154 * p, 2)
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(Psi, sys.argv, sys.stdin, sys.stdout, __name__)
Example 39
        doc=''' field to put value in ''',
        default='heatIndex')


    def stream(self, events):
        """

        :param events:
        :return:
        """

        for event in events:

            self.logger.debug("start")
            try:
                T = float(event[self.Temp])
                RH = float(event[self.Relh])

                # Steadman's simple heat index approximation (degrees F)
                HI = 0.5 * (T + 61.0 + (T - 68.0) * 1.2 + (RH * 0.094))

                event[self.field] = round(HI, 2)
            except Exception:
                event[self.field] = "N/A"

            yield event


dispatch(HeatIndex, sys.argv, sys.stdin, sys.stdout, __name__)
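
The expression above is Steadman's simple heat index approximation; a worked example at T = 90 F and RH = 70 percent:

T, RH = 90.0, 70.0
print(round(0.5 * (T + 61.0 + (T - 68.0) * 1.2 + (RH * 0.094)), 2))  # 91.99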
Example 40
			textmodel.save(args['model'].replace(".pkl",".%s" % args['textmodel']))
		elif args['textmodel'] == 'hashing':
			model['text'] = args['textmodel']

		joblib.dump(model, args['model'])

		print >> sys.stderr, "END"

		channel.send({ 
			'model': args['model'], 
			'score': score.item() if score else None, 
			'training_size': X_train.shape[0],
			'test_size': X_test.shape[0]
		})

"""

    def __dir__(self):
        return ["reset", "model", "textmodel", "test_size", "target", "_C"]

    @Configuration(clear_required_fields=True)
    def map(self, records):
        try:
            for record in records:
                yield record
        except:
            yield {}


dispatch(MCTrain, sys.argv, sys.stdin, sys.stdout, __name__)
Example 41
                            event_id = hashlib.md5(incident[0]['incident_id'].encode('utf-8') + now.encode('utf-8')).hexdigest()
                            event = 'time="{}" severity=INFO origin="ModifyIncidentsCommand" event_id="{}" user="******" action="change" incident_id="{}" {}="{}" previous_{}="{}"'.format(now, event_id, user, incident[0]['incident_id'], key, attrs[key], key, incident[0][key])
                            
                            input.submit(event, hostname = socket.gethostname(), sourcetype = 'incident_change', source = 'modifyincidents.py', index = self.config['index'])

                            incident[0][key] = attrs[key]

                    if len(changed_keys) > 0:
                        uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incidents/' + incident[0]['_key']
                        del incident[0]['_key']
                        contentsStr = json.dumps(incident[0])
                        serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey, jsonargs=contentsStr)

                    if self.comment:
                        self.comment = self.comment.replace('\n', '<br />').replace('\r', '')
                        event_id = hashlib.md5(incident[0]['incident_id'].encode('utf-8') + now.encode('utf-8')).hexdigest()
                        event = 'time="{}" severity=INFO origin="ModifyIncidentsCommand" event_id="{}" user="******" action="comment" incident_id="{}" comment="{}"'.format(now, event_id, user, incident[0]['incident_id'], self.comment)
                        event = event.encode('utf8')
                        input.submit(event, hostname = socket.gethostname(), sourcetype = 'incident_change', source = 'modifyincidents.py', index = self.config['index'])

                else:                        
                    self.logger.warn("No attributes to modify found, aborting.")

            else:
                self.logger.warn("No incident_id field found in event, aborting.")  

            yield record
       

dispatch(ModifyIncidentsCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 42
                    field = self.field
            else:
                if 'mac_address' in record:
                    mac_address = record['mac_address']
                    field = 'mac_address'

            if mac_address is not None and field is not None:

                url = 'http://www.macvendorlookup.com/api/v2/%s' % mac_address

                try:
                    urlHandle = urllib.urlopen(url)
                    if urlHandle.getcode() == 200:
                        content = urlHandle.read()
                        content = json.loads(content)

                        record[field + '_vendor'] = content[0]['company']
                        record[field + '_vendor_country'] = content[0]['country']
                except Exception as e:
                    exc_type, exc_obj, exc_tb = sys.exc_info()
                    self.logger.error("Unable to open url %s. Reason: %s. Line: %s" % (url, exc_type, exc_tb.tb_lineno))

            else:
                self.logger.warn("No mac_address field found in event, aborting.")

            yield record

dispatch(GetMacVendor, sys.argv, sys.stdin, sys.stdout, __name__)
    .. code-block::
        | inputlookup tweets | countmatches fieldname=word_count pattern="\\w+" text

    """
    fieldname = Option(doc='''
        **Syntax:** **fieldname=***<fieldname>*
        **Description:** Name of the field that will hold the match count''',
                       require=True,
                       validate=validators.Fieldname())

    pattern = Option(doc='''
        **Syntax:** **pattern=***<regular-expression>*
        **Description:** Regular expression pattern to match''',
                     require=True,
                     validate=validators.RegularExpression())

    def stream(self, records):
        self.logger.debug('CountMatchesCommand: %s', self)  # logs command line
        pattern = self.pattern
        for record in records:
            count = 0L
            for fieldname in self.fieldnames:
                matches = pattern.findall(
                    unicode(record[fieldname].decode("utf-8")))
                count += len(matches)
            record[self.fieldname] = count
            yield record


dispatch(CountMatchesCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 44
        if proxies['https'] is not None:
            proxy = urllib2.ProxyHandler(proxies)
            opener = urllib2.build_opener(proxy)
            urllib2.install_opener(opener)

        try:
            url = urllib2.urlopen(
                "https://raw.githubusercontent.com/ejrv/VPNs/master/vpn-ipv4.txt"
            )
        except:
            raise Exception("Please check app proxy settings")

        if url.getcode() == 200:

            for line in url.read().split('\n'):
                if line.startswith('#') or not line:
                    # ignore blank lines and lines starting with #
                    continue
                if '/' not in line:
                    # bare IP address: normalize to CIDR notation as a /32
                    line = line.strip() + '/32'

                yield {'ip': line, 'vpn': 'true'}

        else:
            raise Exception("Received response: " + url.getcode())


dispatch(VPNGenCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 45
    def processDate(self, event, field):
        try:
            timestamp = float(event[field])
            value = repr(datetime.date.fromtimestamp(timestamp))
            return eval("humanize." + self.command + "(" + value + ")")
        except ValueError:
            pass

    def processTime(self, event, field):
        try:
            timestamp = float(event[field])
            value = repr(datetime.datetime.fromtimestamp(timestamp))
            return eval("humanize." + self.command + "(" + value + ")")
        except ValueError:
            pass

    def stream(self, events):
        self.logger.debug('HumanizeCommand: {}\n {}'.format(self, self.command))  # logs command line
        for event in events:
            for field in self.fieldnames:
                if self.command in ['naturalday', 'naturaldate'] and field in event and len(event[field]) > 0:
                    event[field] = self.processDate(event, field)
                elif self.command == 'naturaltime' and field in event and len(event[field]) > 0:
                    event[field] = self.processTime(event, field)
                elif field in event and len(event[field]) > 0:
                    event[field] = eval("humanize." + self.command + "(" + event[field] + ")")
            yield event


dispatch(HumanizeCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 46
        logger = logging.getLogger('splunk.' + FILENAME)
        logger.setLevel(logging.INFO)
        SPLUNK_HOME = os.environ['SPLUNK_HOME']

        LOGGING_DEFAULT_CONFIG_FILE = os.path.join(SPLUNK_HOME, 'etc',
                                                   'log.cfg')
        LOGGING_LOCAL_CONFIG_FILE = os.path.join(SPLUNK_HOME, 'etc',
                                                 'log-local.cfg')
        LOGGING_STANZA_NAME = 'python'
        LOGGING_FILE_NAME = FILENAME + ".log"
        BASE_LOG_PATH = os.path.join('var', 'log', 'splunk')
        LOGGING_FORMAT = "%(asctime)s %(levelname)-s\t%(module)s:%(lineno)d - %(message)s"

        splunk_log_handler = logging.handlers.RotatingFileHandler(os.path.join(
            SPLUNK_HOME, BASE_LOG_PATH, LOGGING_FILE_NAME),
                                                                  mode='a')
        splunk_log_handler.setFormatter(logging.Formatter(LOGGING_FORMAT))
        logger.addHandler(splunk_log_handler)
        splunk.setupSplunkLogger(logger, LOGGING_DEFAULT_CONFIG_FILE,
                                 LOGGING_LOCAL_CONFIG_FILE,
                                 LOGGING_STANZA_NAME)

        # some logging in case of successful initialization.
        logger.info("Script started and logging initialized correctly.")

        return logger


# run the command specified in the class above
dispatch(TrafficInvFetchingCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 47
                                to_ids.append(r['misp_to_ids'])
                            v['misp_to_ids'] = to_ids
                            category = v['misp_category']
                            # append
                            if r['misp_category'] not in category:
                                category.append(r['misp_category'])
                            v['misp_category'] = category
                            attribute_uuid = v['misp_attribute_uuid']
                            if r['misp_attribute_uuid'] not in attribute_uuid:
                                attribute_uuid.append(r['misp_attribute_uuid'])
                            v['misp_attribute_uuid'] = attribute_uuid
                            if is_object_member is False:
                                misp_type = r['misp_type'] \
                                    + '|' + v['misp_type']
                                v['misp_type'] = misp_type
                                misp_value = r['misp_value'] + \
                                    '|' + v['misp_value']
                                v['misp_value'] = misp_value
                            output_dict[key] = dict(v)
            for k, v in list(output_dict.items()):
                yield v


if __name__ == "__main__":
    # set up logging suitable for splunkd consumption
    loglevel = logging_level()
    logging.error('logging level is set to %s', loglevel)
    logging.root.setLevel(loglevel)
    dispatch(mispgetevent, sys.argv, sys.stdin, sys.stdout, __name__)
Example 48
    ##Syntax


    ##Description


    ##Example


    """
    field = Option(doc='''
        **Syntax:** **field=***<fieldname>*
        **Description:** Name of the multivalued field to expand''',
                   require=True)

    def stream(self, events):

        for event in events:

            if self.field in event:

                if len(event[self.field]) > 0:

                    for value in event[self.field]:
                        event[self.field + "_mvbm_" + value] = 1

            yield event


dispatch(MVBMCommand, sys.argv, sys.stdin, sys.stdout, __name__)
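
An illustration of the bitmap expansion above, assuming the field is multivalued (a list; a plain string would be iterated character by character):

event = {'tags': ['web', 'prod']}
for value in event['tags']:
    event['tags' + '_mvbm_' + value] = 1
print(event)  # {'tags': ['web', 'prod'], 'tags_mvbm_web': 1, 'tags_mvbm_prod': 1}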
Example 49
        url = 'https://localhost:8089/servicesNS/%s/%s/directory' % (self.objowner, self.appname)

        if self.objtype == 'views':
            url = url + '?search=eai:location%3D/data/ui/views'
        elif self.objtype == 'extractions':
            url = url + '?search=eai:location%3D/data/props/extractions'
        elif self.objtype == 'transforms':
            url = url + '?search=eai:location%3D/data/transforms/extractions'
        elif self.objtype == 'savedsearches':
            url = url + '?search=eai:location%3D/saved/searches'
        elif self.objtype == 'macros':
            url = url + '?search=eai:location%3D/data/macros'
        elif self.objtype == 'datamodels':
            url = 'https://localhost:8089/servicesNS/%s/%s/datamodel/model?count=0' % (self.objowner, self.appname)
        url = url + '&search=eai:acl.app%3D' + self.appname + '&count=0&output_mode=json'

        #Hardcoded user credentials
        attempt = requests.get(url, verify=False, auth=HTTPBasicAuth('admin', 'changeme'))
        if attempt.status_code != 200:
            yield {'result': 'Unknown failure, received a non-200 response code of %s on the URL %s, text result is %s' % (attempt.status_code, url, attempt.text)}
            return

        #We received a response but it could be a globally shared object and not one from this app so we now need to check
        all_found_objects = json.loads(attempt.text)['entry']
        #Only list the objects that are private
        objects = {obj['name']: obj['acl']['owner'] for obj in all_found_objects if obj['acl']['sharing'] == 'user'}
        for object_name in objects:
            yield { 'result': object_name, 'owner': objects[object_name] }

dispatch(ListPrivateObjectsCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example 50
    """
    inputfield = Option(doc='''
        **Syntax:** **inputfield=***<fieldname>*
        **Description:** Name of the field that holds the header value''',
                        require=True,
                        validate=validators.Fieldname())

    outputfield = Option(doc='''
        **Syntax:** **outputfield=***<fieldname>*
        **Description:** Name of the field that will hold the decoded header value''',
                         require=True,
                         validate=validators.Fieldname())

    def stream(self, records):
        self.logger.debug('DecodeHeaderCommand: %s', self)  # logs command line
        default_charset = 'ASCII'
        for record in records:
            if self.inputfield in record:
                try:
                    dh = header.decode_header(record[self.inputfield])
                    s = ''.join(
                        [unicode(t[0], t[1] or default_charset) for t in dh])
                    record[self.outputfield] = s
                except Exception as e:
                    record[self.outputfield + '_err'] = str(e)
            yield record


dispatch(DecodeHeaderCommand, sys.argv, sys.stdin, sys.stdout, __name__)
                            # set default threshold if option is empty
                            c = self.threshold or 1.5
                            a = -4
                            b = 3
                            mc = float(response_json["medcouple"])
                            # asymmetric standard boxplot rule bounds for positive MC are [Q1 - c * exp(a * MC) * IQD, Q3 + c * exp(b * MC) * IQD]
                            # asymmetric standard boxplot rule bounds for negative MC are [Q1 - c * exp(-b * MC) * IQD, Q3 + c * exp(-a * MC) * IQD]
                            # calculate the IQD (inter quartile distance)
                            iqd = response_json["pct75"] - response_json["pct25"]
                            if mc >= 0:
                                lower_adjustment_factor = c * math.exp(a * mc) * iqd
                                upper_adjustment_factor = c * math.exp(b * mc) * iqd
                            else:
                                lower_adjustment_factor = c * math.exp(-b * mc) * iqd
                                upper_adjustment_factor = c * math.exp(-a * mc) * iqd
                            lower_bound = response_json["pct25"] - lower_adjustment_factor
                            upper_bound = response_json["pct75"] + upper_adjustment_factor

                        # combine lower and upper bound into a list
                        bounds = [lower_bound, upper_bound]
                        # score the value according to the calculated bounds
                        new_record[fieldname + ":score"] = map_score(float(new_record[fieldname]), bounds)
                        # add the bounds and retrieved values if debug is true
                        if self.debug:
                            new_record[fieldname + ":bounds"] = bounds
                            new_record[fieldname + ":stats"] = response_json

            yield new_record

dispatch(CompareToBaselineCommand, sys.argv, sys.stdin, sys.stdout, __name__)
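The bounds above implement the medcouple-adjusted (asymmetric) boxplot rule. A standalone sketch of the arithmetic with made-up quartiles; note that a medcouple of 0 collapses to the familiar 1.5 * IQR fences:

import math

def adjusted_bounds(q1, q3, mc, c=1.5, a=-4, b=3):
    # Medcouple-adjusted boxplot rule, same constants as the command above
    iqd = q3 - q1
    if mc >= 0:
        return [q1 - c * math.exp(a * mc) * iqd, q3 + c * math.exp(b * mc) * iqd]
    return [q1 - c * math.exp(-b * mc) * iqd, q3 + c * math.exp(-a * mc) * iqd]

print(adjusted_bounds(7, 10, 0.0))  # [2.5, 14.5], the classic 1.5 * IQR fences
print(adjusted_bounds(7, 10, 0.3))  # right-skewed data widens the upper fence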
Example no. 52
    seed = Option(
        doc='''**Syntax:** **seed=***<string>*
        **Description:** Value for initializing the random number generator ''')

    def generate(self):
        """ Yields one random record at a time for the duration of `duration` """
        self.logger.debug('SimulateCommand: %s', self)  # log command line
        if not self.records:
            if self.seed is not None:
                random.seed(self.seed)
            self.records = [record for record in csv.DictReader(self.csv_file)]
            self.lambda_value = 1.0 / (self.rate / float(self.interval))
        duration = self.duration
        while duration > 0:
            count = long(round(random.expovariate(self.lambda_value)))
            start_time = time.clock()
            for record in random.sample(self.records, count):
                yield record
            interval = time.clock() - start_time
            if interval < self.interval:
                time.sleep(self.interval - interval)
            duration -= max(interval, self.interval)
        return

    def __init__(self):
        super(SimulateCommand, self).__init__()
        self.lambda_value = None
        self.records = None

dispatch(SimulateCommand, sys.argv, sys.stdin, sys.stdout, __name__)
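Since the mean of random.expovariate(lambd) is 1 / lambd, setting lambda_value to 1 / (rate / interval) makes the expected draw rate / interval events per tick. A quick numeric check with hypothetical option values:

import random

rate, interval = 50, 10  # hypothetical option values
lambda_value = 1.0 / (rate / float(interval))
draws = [random.expovariate(lambda_value) for _ in range(100000)]
print(sum(draws) / len(draws))  # ~5.0, i.e. rate / interval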
Example no. 53
def main():
    dispatch(ExtractPlatformCommand, sys.argv, sys.stdin, sys.stdout)
Example no. 54
                for record in correct_records:
                    links.append(record.get(self.field))
                    rs.append(record)

                results_dict = ipqualityscoreclient.url_checker_multithreaded(
                    links, strictness=self.strictness)

                for record in rs:
                    detection_result = results_dict.get(record[self.field])
                    prefix = ipqualityscoreclient.get_prefix()

                    if detection_result is not None:
                        for key, val in detection_result.items():
                            record[prefix + "_" + key] = val
                        record[prefix + "_status"] = 'api call success'
                    else:
                        record[prefix + "_status"] = 'api call failed'

                    yield record
            else:
                raise Exception("No credentials have been found")
        else:
            raise Exception("There are no events with url field.")


if __name__ == "__main__":
    dispatch(URLCheckerCommand, sys.argv, sys.stdin, sys.stdout, __name__)
Example no. 55
            if self.find_child:
                if self.find_child_attrs is not None:
                    soup = soup.findChild(
                        self.find_child, 
                        literal_eval('{'+self.find_child_attrs+'}')
                    )
                else:
                    soup = soup.findChild(self.find_child)
            if self.find_children:
                if self.find_children_attrs is not None:
                    soup = soup.findChildren(
                        self.find_children, 
                        literal_eval('{'+self.find_children_attrs+'}')
                    )
                else:
                    soup = soup.findChildren(self.find_children)
            if self.get_text and not (self.find_all or self.find_children):
                record[self.get_text_label] = \
                    soup.get_text().decode('unicode_escape').encode('ascii','ignore')
            elif self.get_text and (self.find_all or self.find_children):
                record[self.get_text_label] = [
                    i.get_text().decode('unicode_escape').encode('ascii','ignore')
                    for i in soup
                ]
            else:
                record['soup'] = soup

            yield record

dispatch(Bs4, sys.argv, sys.stdin, sys.stdout, __name__)
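findChild and findChildren accept an attrs dict as their second argument; the command builds that dict from a user-supplied string with literal_eval. A small self-contained example (the HTML and attrs string are made up):

from ast import literal_eval
from bs4 import BeautifulSoup

soup = BeautifulSoup('<div class="a"><p id="x">hi</p></div>', 'html.parser')
# User string '"id": "x"' becomes the attrs dict {'id': 'x'}
attrs = literal_eval('{' + '"id": "x"' + '}')
print(soup.findChild('p', attrs).get_text())  # hi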
Example no. 56
                # Resolve the variables
                for lookup_field, event_field in list(
                        variable_output_fields.items()):
                    if event_field in list(e.keys()):
                        if e[event_field] is not None:
                            resolved_variables[groupby_value][
                                lookup_field] = e[event_field]
                            new_kv_record[lookup_field] = e[event_field]

                # Write the new kvstore record and get the ID (_key)
                response = obj_collection.data.insert(
                    json.dumps(new_kv_record))
                kvstore_entry_key = response["_key"]
                resolved_variables[groupby_value]["_key"] = kvstore_entry_key

                # Write the data to disk immediately so other threads can benefit
                with open(resolved_variables_file, 'w') as f:
                    f.write(json.dumps(resolved_variables, ensure_ascii=False))
                    inserts += 1

            # Write the KV store record's _key value to the event
            e[self.outputkeyfield] = kvstore_entry_key

            yield e
            i += 1
        logger.info("Modified %d events and inserted %d new records into %s" %
                    (i, inserts, self.collection))


dispatch(kvstore_createfkCommand, sys.argv, sys.stdin, sys.stdout, __name__)
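The insert call above returns the new record's _key, which the command then writes back onto the event. A minimal sketch of that round trip, assuming the splunklib client (the snippet's obj_collection exposes the same data.insert interface); host, credentials, and collection name are placeholders:

import json
import splunklib.client as client

service = client.connect(host='localhost', port=8089,
                         username='admin', password='changeme')
collection = service.kvstore['my_collection']
# KV store assigns and returns the _key on insert
response = collection.data.insert(json.dumps({'groupby': 'hostA'}))
print(response['_key'])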
Example no. 57
            if cnt % batch == 0:
                # batch level reached poll to get producer to move messages out
                eprint(
                    "export2kafka - batch reached, calling poll... processed records: "
                    + str(cnt))
                for p in producers:
                    p.poll(0)

            if cnt % 10 == 0 and int(
                    time.time()) > (60 * timeout) + self.start_time:
                # quit after timeout has been reached, only check every 10 records
                eprint("export2kafka - timeout reached, stopping search...")
                break

            # return record for display in Splunk
            yield record

        eprint(
            "export2kafka - all records processed for stream... processed records: "
            + str(cnt))
        eprint("export2kafka - calling flush...")
        for p in producers:
            p.flush()
        eprint("export2kafka - flush finished...")
        eprint("export2kafka - stream finished")


if __name__ == "__main__":
    dispatch(FileSinkCommand, sys.argv, sys.stdin, sys.stdout, __name__)
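poll(0) gives the Kafka client a chance to serve delivery callbacks and drain its internal queue mid-stream, while flush() blocks until every queued message is delivered or fails. A minimal producer sketch matching that pattern, assuming confluent-kafka (whose Producer exposes exactly these poll/flush calls); the broker address and topic are placeholders:

from confluent_kafka import Producer

producer = Producer({'bootstrap.servers': 'localhost:9092'})

for i in range(1000):
    producer.produce('splunk-export', value='record %d' % i)
    if i % 100 == 0:
        # Serve delivery callbacks and let the internal queue drain
        producer.poll(0)

# Block until every queued message is delivered (or errors out)
producer.flush()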