Exemple #1
0
def main():
    """Entry point for the anomaly-detection custom search command.

    Reads organized results from Splunk, validates the fields, optionally
    runs anomaly detection via wrap_anomaly_detection(), and writes the
    annotated results back.  Unexpected failures are reported to Splunk as
    error results rather than crashing the command.
    """
    try:
        output_fields = ['_time']
        output_results = []
        search_results, dummyresults, settings = intersplunk.getOrganizedResults(
        )
        # No input events: emit an empty result set with just the _time field.
        if search_results is None or len(search_results) == 0:
            intersplunk.outputResults(output_results, fields=output_fields)
            return

        fields = search_results[0].keys()
        is_field_valid, is_detection_needed = check_fields(fields)
        if not is_field_valid:
            intersplunk.parseError(
                'This visualization requires timestamped, evenly spaced numeric time-series data. Try using the timechart command in your query.'
            )

        # Detection not required: pass events through unchanged.
        if not is_detection_needed:
            intersplunk.outputResults(search_results,
                                      fields=search_results[0].keys())
            return

        output_results, output_fields = wrap_anomaly_detection(search_results)
        intersplunk.outputResults(output_results, fields=output_fields)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit raised by
        # intersplunk.parseError(); `except Exception` lets exits propagate
        # while still converting real errors into Splunk error results.
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " +
                                                   str(stack))
        intersplunk.outputResults(results)
Exemple #2
0
 def _handle_response(self, response, content):
     """Parse the ticket-creation HTTP response into a result record.

     Returns the parsed result dict on HTTP 200/201 (or an error dict if
     the response body yields no record); logs and reports a parse error,
     then returns None, for any other status.
     """
     # Anything other than 200/201 is treated as a failure.
     if response.status not in (200, 201):
         self.logger.error("Failed to create ticket. Return code is %s. "
                           "Reason is %s", response.status, response.reason)
         si.parseError("Failed to create ticket. Return code is {0}. Reason"
                       " is {1}".format(response.status, response.reason))
         return None

     record = self._get_resp_record(content)
     if record:
         outcome = self._get_result(record)
     else:
         outcome = {"error": "Failed to create ticket"}
     self.logger.debug(outcome)
     return outcome
 def _handle_response(self, response, content):
     """Turn an HTTP ticket-creation response into a result dict.

     On HTTP 200/201 returns the extracted result (or an error dict when
     the body contains no record); otherwise logs, reports a parse error
     and returns None.
     """
     succeeded = response.status in (200, 201)
     if not succeeded:
         self.logger.error(
             "Failed to create ticket. Return code is %s. "
             "Reason is %s", response.status, response.reason)
         si.parseError("Failed to create ticket. Return code is {0}. Reason"
                       " is {1}".format(response.status, response.reason))
         return None
     resp = self._get_resp_record(content)
     if not resp:
         return {"error": "Failed to create ticket"}
     return self._get_result(resp)
Exemple #4
0
def wrap_anomaly_detection(search_results):
    """Run anomaly detection over every non-filtered field of the results.

    For each detected field F, adds value_F / outlier_F / severity_F
    columns to each output row (one row per event after the training
    window).  Returns (output_results, output_fields); output_results is
    empty when no outliers were found in any field.
    """
    fields = search_results[0].keys()
    detected_fields = list(
        filter(lambda x: x not in const.FILTER_FIELDS, fields))
    # Training-window size; hoisted out of the loops (assumed constant for
    # the duration of this call).
    train_count = algorithm.get_train_count()
    output_count = len(search_results) - train_count
    # Initialize one output row per post-training event, carrying _time.
    output_results = [{
        '_time': search_results[i + train_count]['_time']
    } for i in xrange(output_count)]
    output_fields = ['_time']
    outlier_count = 0
    for cur_field in detected_fields:
        output_fields += [
            'value_' + cur_field, 'outlier_' + cur_field,
            'severity_' + cur_field
        ]
        try:
            cur_data = [
                float(str(search_results[i][cur_field]))
                for i in xrange(len(search_results))
            ]
            outlier_indexes, severity_array = algorithm.anomaly_detection(
                cur_data)
            # Set for O(1) membership tests; the original scanned the list
            # three times per output row.
            outlier_set = set(outlier_indexes)
            severity_index = 0
            outlier_count += len(outlier_indexes)
            for i in xrange(output_count):
                search_results_index = i + train_count
                is_outlier = search_results_index in outlier_set

                output_results[i]['value_' + cur_field] = search_results[
                    search_results_index][cur_field]
                output_results[i]['outlier_' + cur_field] = is_outlier
                # severity_array is indexed by outlier ordinal, not by row.
                output_results[i]['severity_' + cur_field] = severity_array[
                    severity_index] if is_outlier else 0

                if is_outlier:
                    severity_index += 1

        except ValueError:
            # float() failed: the field holds non-numeric data.
            intersplunk.parseError(
                'This command only supports numbers. Field %s is not numerical. '
                % cur_field)

    if outlier_count == 0:
        return [], output_fields
    else:
        return output_results, output_fields
Exemple #5
0
def main():
    """Annotate each Splunk event with passive-DNS query/answer fields.

    For every requested field present in an event, queries passive DNS and
    collects distinct query/answer values (excluding the looked-up value
    itself), then attaches them as the event's "query"/"answer" fields.
    """
    p = pdns.PDNS()

    # Parse arguments from splunk search
    opts, kwargs = Intersplunk.getKeywordsAndOptions()
    limit = int(kwargs.get("limit", 25))

    events, _, _ = Intersplunk.getOrganizedResults()

    # Annotate events
    for event in events:
        values = []
        for field in opts:
            if event.get(field):
                values.append(event[field])

        if not values:
            continue

        # Dicts used as ordered de-duplicating sets.
        query = {}
        answer = {}
        for val in values:
            try:
                for res in p.query(val, limit=limit):
                    # BUG FIX: compare against the current value `val`, not
                    # the whole list -- the list comparison was always
                    # unequal, so the self-match filter never applied.
                    if res["query"] != val:
                        query[res["query"]] = True
                    if res["answer"] != val:
                        answer[res["answer"]] = True
            except pdns.connectionError as e:
                Intersplunk.parseError(str(e))
                return
            except pdns.resourceLimitExceeded as e:
                Intersplunk.parseError(str(e))
                return

        if query:
            if "query" not in event:
                event["query"] = query.keys()

        if answer:
            if "answer" not in event:
                event["answer"] = answer.keys()

    Intersplunk.outputResults(events)
Exemple #6
0
def parseArgs(txt):
    """Parse "[count] field-list" command text.

    Returns (count, mapping): count defaults to 1 when no leading integer
    is given; mapping is a list of (alias, field) pairs.  "alias=field"
    sets an explicit alias, a bare field aliases to itself, and a "$field"
    reference has its '$' stripped while keeping an empty alias.
    Reports a parse error to Splunk when txt has no field list.
    """
    # Raw strings for the regexes (they contain backslash escapes).
    m = re.match(r'\s*(?:(?P<count>-?\d+)\s+)?(?P<variables>.+)', txt)
    if m is None:
        si.parseError(usage())
    md = m.groupdict()
    counttext = md['count']
    count = 1
    if counttext is not None:
        count = int(counttext)

    variables = md['variables']
    mapping = []

    matches = re.findall(r"(?i)\s*(?:(?P<alias>[a-z0-9_.]+)\s*[=])?\s*(?P<field>[$a-z0-9_.]+)", variables)
    for alias, value in matches:
        if value.startswith('$'):
            value = value[1:]
        elif alias == '':
            alias = value
        mapping.append((alias, value))
    return count, mapping
Exemple #7
0
def parseArgs(txt):
    """Parse "[count] field-list" command text (no dots in field names).

    Returns (count, mapping): count defaults to 1 when no leading integer
    is given; mapping is a list of (alias, field) pairs.  "alias=field"
    sets an explicit alias, a bare field aliases to itself, and a "$field"
    reference has its '$' stripped while keeping an empty alias.
    Reports a parse error to Splunk when txt has no field list.
    """
    # Raw strings for the regexes (they contain backslash escapes).
    m = re.match(r'\s*(?:(?P<count>-?\d+)\s+)?(?P<variables>.+)', txt)
    if m is None:
        si.parseError(usage())
    md = m.groupdict()
    counttext = md['count']
    count = 1
    if counttext is not None:
        count = int(counttext)

    variables = md['variables']
    mapping = []

    matches = re.findall(r"(?i)\s*(?:(?P<alias>[a-z0-9_]+)\s*[=])?\s*(?P<field>[$a-z0-9_]+)", variables)
    for alias, value in matches:
        if value.startswith('$'):
            value = value[1:]
        elif alias == '':
            alias = value
        mapping.append((alias, value))
    return count, mapping
Exemple #8
0
def main():
    """Query passive DNS for each search keyword and output all records."""
    client = pdns.PDNS()

    # Keywords are the values to look up; options may override the limit.
    keywords, options = Intersplunk.getKeywordsAndOptions()

    # Per-query result cap; defaults to 25 when not specified.
    limit = int(options.get("limit", 25))

    combined = []

    for keyword in keywords:
        try:
            found = client.query(keyword, limit=limit)
        except pdns.connectionError as e:
            Intersplunk.parseError(str(e))
            return
        except pdns.resourceLimitExceeded as e:
            Intersplunk.parseError(str(e))
            return

        combined.extend(found)
    Intersplunk.outputResults(combined)
Exemple #9
0
def main():
    """Look up each keyword via passive DNS and emit the combined results."""
    pdns_client = pdns.PDNS()

    # Parse arguments from splunk search
    opts, kwargs = Intersplunk.getKeywordsAndOptions()

    # Get limit from kwargs, but default to 25 if not specified
    max_records = int(kwargs.get("limit", 25))

    all_results = []

    for term in opts:
        try:
            matches = pdns_client.query(term, limit=max_records)
        except pdns.connectionError as err:
            Intersplunk.parseError(str(err))
            return
        except pdns.resourceLimitExceeded as err:
            Intersplunk.parseError(str(err))
            return

        all_results += matches
    Intersplunk.outputResults(all_results)


try:
    main()
except Exception as e:
    # Surface any unhandled failure back to Splunk as a search parse error
    # carrying the full traceback.
    Intersplunk.parseError(traceback.format_exc())
 def error(self, message):
     # Report an argument-parsing error to Splunk, appending usage text.
     si.parseError("{0}. {1}".format(message, self.format_usage()))
Exemple #11
0
def main():
    """Dispatch an AWS pricing command based on argv arity.

    2 args ("info"): refresh stored AWS price info.
    5 args: look up price detail for (region, instance_type, os, tenancy).
    7 args: compute optimal RI count, RI cost and on-demand cost.
    Any exception is reported to Splunk as error results.
    """
    try:
        search_results, dummyresults, settings = intersplunk.getOrganizedResults(
        )
        session_key = settings['sessionKey']
        if len(sys.argv) == 2:
            # update aws price info
            if sys.argv[1] == 'info':
                task = AwsInfoTask(session_key)
                task.execute()
        elif len(sys.argv) == 5:
            # obtain price detail
            region = sys.argv[1]
            instance_type = sys.argv[2]
            product_os = sys.argv[3]
            tenancy = sys.argv[4]
            on_demand_hourly, reserved_one_all_yearly, reserved_one_partial_yearly, reserved_one_partial_hourly, reserved_one_no_hourly, currency = read_price(
                region, instance_type, product_os, tenancy, session_key)

            intersplunk.outputResults([{
                PRICE_ON_DEMAND_HOURLY: on_demand_hourly,
                PRICE_RESERVED_ONE_ALL_YEARLY: reserved_one_all_yearly,
                PRICE_RESERVED_ONE_PARTIAL_YEARLY: reserved_one_partial_yearly,
                PRICE_RESERVED_ONE_PARTIAL_HOURLY: reserved_one_partial_hourly,
                PRICE_RESERVED_ONE_NO_HOURLY: reserved_one_no_hourly,
                CURRENCY: currency
            }],
                                      fields=[
                                          PRICE_ON_DEMAND_HOURLY,
                                          PRICE_RESERVED_ONE_ALL_YEARLY,
                                          PRICE_RESERVED_ONE_PARTIAL_YEARLY,
                                          PRICE_RESERVED_ONE_PARTIAL_HOURLY,
                                          PRICE_RESERVED_ONE_NO_HOURLY,
                                          CURRENCY
                                      ])
        elif len(sys.argv) == 7:
            # calculate optimal RI, RI cost and on demand cost
            base = sys.argv[1]
            region = sys.argv[2]
            instance_type = sys.argv[3]
            purchase_option = sys.argv[4]
            product_os = sys.argv[5]
            tenancy = sys.argv[6]

            valid_days, message = get_valid_days_from_conf(session_key)
            if valid_days < 0:
                # Configuration invalid: report N/A instead of computing.
                ri = 'N/A'
                ri_cost = 'N/A'
                instance_hours = []
                on_demand_hourly = 0
                # '\xc2\xa5' is the UTF-8 byte sequence for the yen/yuan
                # sign (Python 2 byte-string .decode); Chinese regions get
                # that symbol, all others '$'.
                currency = '$' if re.match(
                    r'cn-.*', region) == None else '\xc2\xa5'.decode('utf8')
            else:
                history_len, instance_hours = get_instance_hours(
                    base, search_results)
                # read price
                on_demand_hourly, reserved_one_all_yearly, reserved_one_partial_yearly, reserved_one_partial_hourly, reserved_one_no_hourly, currency = read_price(
                    region, instance_type, product_os, tenancy, session_key)

                # Require at least valid_days worth of hourly history.
                if valid_days * HOURS_OF_DAY > history_len:
                    ri = 'N/A'
                    ri_cost = 'N/A'
                    message = 'It\'s required to have %d days\' data at least. You can update the setting in recommendation.conf' % (
                        valid_days)
                else:
                    # Effective hourly RI rate depends on the purchase
                    # option: all-upfront, partial-upfront, or no-upfront.
                    if purchase_option == 'all':
                        ri, ri_cost, message = ri_wrap(
                            instance_hours, on_demand_hourly,
                            reserved_one_all_yearly / HOURS_OF_YEAR)
                    elif purchase_option == 'partial':
                        ri, ri_cost, message = ri_wrap(
                            instance_hours, on_demand_hourly,
                            reserved_one_partial_yearly / HOURS_OF_YEAR +
                            reserved_one_partial_hourly)
                    else:
                        ri, ri_cost, message = ri_wrap(instance_hours,
                                                       on_demand_hourly,
                                                       reserved_one_no_hourly)

            # max(1, ...) guards the division below against an empty
            # instance_hours list.
            instance_hours_len = max(1, len(instance_hours))
            outputResults = []
            cur_line = {}
            cur_line[ON_DEMAND_COST] = int(
                round(on_demand_hourly * sum(instance_hours) /
                      instance_hours_len * HOURS_OF_YEAR))  # on demand cost
            cur_line[RI] = ri
            cur_line[RI_COST] = 'N/A' if ri_cost == 'N/A' else int(
                round(ri_cost / instance_hours_len * HOURS_OF_YEAR))  # RI cost
            cur_line[MESSAGE] = message
            cur_line[CURRENCY] = currency
            outputResults.append(cur_line)
            intersplunk.outputResults(
                outputResults,
                fields=[RI, RI_COST, ON_DEMAND_COST, MESSAGE, CURRENCY])
        else:
            intersplunk.parseError(
                "Arguments should be recommendation base, AZ, instance type, purchase option, os and tenancy."
            )
    except:
        # Convert any failure into Splunk error results with the traceback.
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " +
                                                   str(stack))
        intersplunk.outputResults(results)
import csv
import sys
import splunk.Intersplunk as intersplunk
import splunk.rest as rest
import urllib
import json
import re
import collections

#(isgetinfo, sys.argv) = intersplunk.isGetInfo(sys.argv)

# Require the incident_id as the single command argument.
if len(sys.argv) < 2:
    intersplunk.parseError("Please specify a valid incident_id")

#if isgetinfo:
#    intersplunk.outputInfo(False, False, True, False, None, True)
#    # outputInfo automatically calls sys.exit()

# Splunk passes search metadata on stdin; drop the leading 11-character
# prefix (presumably "authString:" -- TODO confirm) and URL-decode to
# expose the <authToken> XML, from which the session key is extracted.
stdinArgs = sys.stdin.readline()
stdinArgs = stdinArgs.strip()
stdinArgs = stdinArgs[11:]
stdinArgs = urllib.unquote(stdinArgs).decode('utf8')
match = re.search(r'<authToken>([^<]+)</authToken>', stdinArgs)
sessionKey = match.group(1)

incident_id = sys.argv[1]

# KV-store REST lookup of the incident's stored results, filtered by
# incident_id (query is JSON-encoded then URL-quoted).
query = {}
query['incident_id'] = incident_id
uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incident_results?query=%s' % urllib.quote(
    json.dumps(query))
Exemple #13
0
 def error(self, message):
     # Report an argument-parsing error to Splunk, appending usage text.
     si.parseError("{0}. {1}".format(message, self.format_usage()))
 def _handle_error(self, msg="Failed to create ticket."):
     # Forward the error message to Splunk as a search parse error.
     si.parseError(msg)
import csv
import sys
import splunk.Intersplunk as intersplunk
import splunk.rest as rest
import urllib
import json
import re
import collections

#(isgetinfo, sys.argv) = intersplunk.isGetInfo(sys.argv)

# Require the incident_id as the single command argument.
if len(sys.argv) < 2:
    intersplunk.parseError("Please specify a valid incident_id")

#if isgetinfo:
#    intersplunk.outputInfo(False, False, True, False, None, True)
#    # outputInfo automatically calls sys.exit()    

# Splunk passes search metadata on stdin; drop the leading 11-character
# prefix (presumably "authString:" -- TODO confirm) and URL-decode to
# expose the <authToken> XML, from which the session key is extracted.
stdinArgs = sys.stdin.readline()
stdinArgs = stdinArgs.strip()
stdinArgs = stdinArgs[11:]
stdinArgs = urllib.unquote(stdinArgs).decode('utf8')
match = re.search(r'<authToken>([^<]+)</authToken>', stdinArgs)
sessionKey = match.group(1)

incident_id = sys.argv[1]

# KV-store REST lookup of the incident's stored results, filtered by
# incident_id; the GET is issued with the extracted session key.
query = {}
query['incident_id'] = incident_id
uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incident_results?query=%s' % urllib.quote(json.dumps(query))
serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey)
Exemple #16
0
                pmList.append(key)
        result = {}
        results = []
        result['Placemarks'] = ', '.join(str(x) for x in pmList)
        results.append(result)
        si.outputResults(results)
    except Exception, e2:
        #stack2 =  traceback.format_exc()
        si.generateErrorResults("Error '%s'" % e2)


# Usage
def usage():
    # Emit the usage string as Splunk error results, then abort the command.
    si.generateErrorResults("Script usage: | kml_lookup lat=<lat> lon=<lon>")
    exit(-1)


# Main
if __name__ == '__main__':
    try:
        # lat/lon arrive as search-command options; 0 is the sentinel for
        # "not provided" and triggers the usage message.
        keywords, argvals = si.getKeywordsAndOptions()
        lat = argvals.get("lat", 0)
        lon = argvals.get("lon", 0)

        if not lat or not lon:
            usage()

        getPlacemarks(lat, lon)
    except Exception, e:
        # NOTE(review): parseError's return value is unlikely to be an
        # exception instance; `raise` on it looks suspect -- confirm intent.
        raise si.parseError(e)
import csv
import sys
import splunk.Intersplunk as intersplunk
import splunk.rest as rest
import urllib
import json
import re
import collections

#(isgetinfo, sys.argv) = intersplunk.isGetInfo(sys.argv)

# Require the risk_id as the single command argument.
if len(sys.argv) < 2:
    intersplunk.parseError("Please specify a valid risk_id")

#if isgetinfo:
#    intersplunk.outputInfo(False, False, True, False, None, True)
#    # outputInfo automatically calls sys.exit()    

# Splunk passes search metadata on stdin; drop the leading 11-character
# prefix (presumably "authString:" -- TODO confirm) and URL-decode to
# expose the <authToken> XML, from which the session key is extracted.
stdinArgs = sys.stdin.readline()
stdinArgs = stdinArgs.strip()
stdinArgs = stdinArgs[11:]
stdinArgs = urllib.unquote(stdinArgs).decode('utf8')
match = re.search(r'<authToken>([^<]+)</authToken>', stdinArgs)
sessionKey = match.group(1)

risk_id = sys.argv[1]

# KV-store REST lookup of the risk's stored results, filtered by risk_id;
# the GET is issued with the extracted session key.
query = {}
query['risk_id'] = risk_id
uri = '/servicesNS/nobody/risk_manager/storage/collections/data/risk_results?query=%s' % urllib.quote(json.dumps(query))
serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey)
#   Version 4.0
import sys
import splunk.Intersplunk as isp

# Validate argv: exactly two epoch arguments are required.
if len(sys.argv) != 3:
    isp.parseError(
        "Usage: indextimerange <earliest_time_epoch> <latest_time_epoch>")

a = 0
b = 0
try:
    a = int(sys.argv[1])
    b = int(sys.argv[2])
    # A reversed range is funneled through the same error path as a
    # non-numeric argument.
    if (b < a):
        raise ValueError

except ValueError:
    # Was a bare `except:`; the try body can only raise ValueError (from
    # int() or the explicit raise), so catch exactly that.
    isp.parseError("Invalid earliest and/or latest epoch times")

disjuncts = []

# Build _indextime=<prefix>* wildcard terms by finding, at each step, the
# widest power-of-ten-aligned block starting at `a` that fits within b.
# NOTE(review): as shown, the loop body never advances `a`, so this would
# loop forever -- the snippet appears truncated (an `a += level` or similar
# is likely missing past this excerpt); confirm against the full source.
while a < b:
    level = 10

    while a % level == 0 and (a + level) <= b:
        level = level * 10

    level = level / 10

    disjuncts.append('_indextime=%d%s' % (a / level, level > 1 and '*' or ''))
Exemple #19
0
 def _handle_error(self, msg):
     # Forward the error message to Splunk as a search parse error.
     si.parseError(msg)