Exemple #1
0
def main():
    """Annotate search-pipeline events with ACT fact data.

    Collects the values of the user-specified fields from each event,
    looks them up via fact_search() and merges the returned fields back
    into the event before re-emitting all events.
    """
    client = actconfig.setup()

    # Parse arguments
    opts, kwargs = Intersplunk.getKeywordsAndOptions()

    if not opts:
        # Fixed usage string: the closing ">" after "fact value" was missing.
        Intersplunk.generateErrorResult(
            "Usage: | actadd <field1> ... <fieldN> [fact_type=<fact type>] [fact_value=<fact value>]"
        )
        return

    events, _, _ = Intersplunk.getOrganizedResults()

    # Annotate events
    for event in events:
        object_value = []
        for field in opts:
            if event.get(field):
                object_value.append(event[field])

        # Event has none of the requested fields: leave it untouched.
        if not object_value:
            continue

        event.update(fact_search(client, object_value, **kwargs))

    Intersplunk.outputResults(events)
Exemple #2
0
def run(results, fields):

    try:
        values = set()
        for result in results:
            field = None
            for f, v in result.items():
                if f not in ['count', 'percent']:
                    field = f
                    break
            else:
                continue
            value = result[field]
            if value.lower() == "other":
                value = ' '.join([
                    'NOT %s="%s" ' % (field, v.replace('"', '\\"'))
                    for v in values
                ]) + ' %s=*' % field
            elif value.lower() == "null":
                value = 'NOT %s=*' % field
            else:
                values.add(value)
                value = '%s="%s"' % (field, v.replace('"', '\\"'))

            result['_drilldown'] = value

        if '_drilldown' not in fields:
            fields.append('_drilldown')

        si.outputResults(results, {}, fields=fields)
    except Exception, e2:
        stack2 = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))
Exemple #3
0
def error(msg):
    """Show *msg* as a UI error message and terminate the script."""
    # The legacy si.generateErrorResults() output never reaches the UI,
    # e.g.: si.generateErrorResults("Usage: searchtxn <transaction_type> ...")
    # so build an explicit message dictionary and emit an empty result set.
    messages = {}
    si.addErrorMessage(messages, msg)
    si.outputResults([], messages)
    exit(0)
Exemple #4
0
def error(msg):
    """Show *msg* as a UI error message and terminate the script."""
    # The legacy si.generateErrorResults() output never reaches the UI,
    # e.g.: si.generateErrorResults("Usage: searchtxn <transaction_type> ...")
    # so build an explicit message dictionary and emit an empty result set.
    messages = {}
    si.addErrorMessage(messages, msg)
    si.outputResults([], messages)
    exit(0)
Exemple #5
0
def run(results, fields):
    
    try:
        values = set()
        for result in results:
            field = None
            for f,v in result.items():
                if f not in ['count','percent']:
                    field = f
                    break
            else:
                continue
            value = result[field]
            if value.lower() == "other":
                value = ' '.join(['NOT %s="%s" ' % (field, v.replace('"','\\"')) for v in values]) + ' %s=*' % field
            elif value.lower() == "null":
                value = 'NOT %s=*' % field
            else:
                values.add(value)
                value = '%s="%s"' % (field, v.replace('"','\\"'))

            result['_drilldown'] = value

        if '_drilldown' not in fields:
            fields.append('_drilldown')

        si.outputResults(results, {}, fields=fields)
    except Exception, e2:
        stack2 =  traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))
Exemple #6
0
def run(spantext, seriesmode, results):

    try:

        secsPerSpan, scalar, unit = parseSpan(spantext)
        maxtime = -1
        # for each results
        time_data = {}
        fields_seen = {}
        span = None
        latest = None
        for result in results:
            if maxtime < 0:
                try:
                    maxtime = int(float(result['info_max_time']))
                except:
                    maxtime = int(time.time())
                maxtime -= 1  # not inclusive
            if '_time' not in result:
                raise Exception("Missing required _time field on data")
            if span == None and '_span' in result:
                span = result['_span']
            mytime = int(float(result['_time']))
            spansago = int((maxtime - mytime) / secsPerSpan)
            new_time = mytime + (spansago * secsPerSpan)

            if new_time not in time_data:
                time_data[new_time] = {'_time': new_time, '_span': span}
            this_row = time_data[new_time]

            spanstart = maxtime - ((spansago + 1) * secsPerSpan) + 1
            series = seriesName(series_mode, scalar, spansago, unit, spanstart)
            if spansago == 0: latest = series
            acount = len(result)
            for k, v in result.items():
                if k not in [
                        '_time', 'info_sid', 'info_max_time', 'info_min_time',
                        'info_search_time', 'info_sid', '_span'
                ]:
                    if k == 'count':
                        attr = series
                    else:
                        attr = '%s_%s' % (k, series)
                    this_row[attr] = result[k]
                    fields_seen[attr] = spansago

        field_order = fields_seen.items()
        field_order.sort(lambda x, y: cmp(x[1], y[1]))
        field_order = [f for f, v in field_order]
        field_order.insert(0, '_time')
        field_order.append('_span')

        results = time_data.values()
        results.sort(lambda x, y: cmp(x['_time'], y['_time']))

        si.outputResults(results, {}, fields=field_order)
    except Exception, e2:
        stack2 = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))
Exemple #7
0
def run(spantext, seriesmode, results):
    
    try:

        secsPerSpan, scalar, unit = parseSpan(spantext)
        maxtime = -1
        # for each results
        time_data = {}
        fields_seen = {}
        span = None
        latest = None
        for result in results:
            if maxtime < 0:
                try:
                    maxtime = int(float(result['info_max_time']))
                except:
                    maxtime = int(time.time())
                maxtime -= 1 # not inclusive
            if '_time' not in result:
                raise Exception("Missing required _time field on data")
            if span == None and '_span' in result:
                span = result['_span']
            mytime = int(float(result['_time']))  
            spansago =  int((maxtime-mytime) / secsPerSpan)
            new_time = mytime + (spansago * secsPerSpan)

            if new_time not in time_data:
                time_data[new_time] = { '_time': new_time, '_span': span }
            this_row = time_data[new_time]

            spanstart = maxtime - ((spansago+1)*secsPerSpan) + 1
            series = seriesName(series_mode, scalar, spansago, unit, spanstart)
            if spansago == 0: latest = series
            acount = len(result)
            for k,v in result.items():
                if k not in ['_time', 'info_sid', 'info_max_time', 'info_min_time', 'info_search_time', 'info_sid', '_span']:
                    if k == 'count':
                        attr = series
                    else:
                        attr = '%s_%s' % (k, series)
                    this_row[attr] = result[k]
                    fields_seen[attr] = spansago

        field_order = fields_seen.items()
        field_order.sort(lambda x,y: cmp(x[1], y[1]))
        field_order = [f for f,v in field_order]
        field_order.insert(0,'_time')
        field_order.append('_span')

        results = time_data.values()
        results.sort(lambda x,y: cmp(x['_time'], y['_time']))

        si.outputResults(results, {}, fields=field_order)
    except Exception, e2:
        stack2 =  traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e2, stack2))
Exemple #8
0
def main():
    """Compute utilization over the incoming results and emit the rows."""
    try:
        search_results, dummy_results, settings = intersplunk.getOrganizedResults()
        # Only emit output when there is something to compute on.
        if search_results:
            output_results = cal_utilization(search_results)
            intersplunk.outputResults(output_results,
                                      fields=output_results[0].keys())
    except:
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
        intersplunk.outputResults(results)
Exemple #9
0
def main():
    """Send the current search results to a Slack channel via webhook."""
    # get config from config file; prefer local/ over default/
    config = ConfigParser.ConfigParser()

    if os.path.exists(os.path.join('..', 'local', 'slack.conf')):
        config.readfp(open(os.path.join('..', 'local', 'slack.conf')))
    else:
        config.readfp(open(os.path.join('..', 'default', 'slack.conf')))

    # username and icon can only be set by conf
    username = config.get('config', 'username')
    icon = config.get('config', 'icon')

    # update args if user specifies them in search
    channel = kwargs.get('channel', config.get('config', 'channel'))
    if not channel.startswith('#'): channel = '#' + channel
    if config.get('config', 'allow_user_set_slack_url').lower() in TRUE_VALUES:
        url = kwargs.get('url', config.get('config', 'url'))
    else:
        url = config.get('config', 'url')

    # no url specified, don't proceed
    if not url:
        # Fixed message wording ("Not slack url" -> "No slack url").
        raise Exception("No slack url specified!")

    # read search results
    results = sis.readResults(None, None, True)

    https_proxy = config.get('config', 'proxy')
    proxyDict = {
                  "https" : https_proxy
                }

    # prepare data to be sent to slack
    data = {
        'text': get_pretty_table(results),
        'username': username,
        'channel': channel,
        'icon_url': icon,
        'mrkdwn': True,
    }

    if https_proxy != "":
        # send data to slack through the configured proxy
        r = requests.post(url, data=json.dumps(data), proxies=proxyDict)
    else:
        r = requests.post(url, data=json.dumps(data))

    if r.status_code == 200:
        sis.outputResults(results)
    else:
        err_msg = ("Error sending results to slack, reason: {r}, {t}".format(
                    r=r.reason, t=r.text))
        sis.generateErrorResults(err_msg)
Exemple #10
0
def main():
    """Send the current search results to a HipChat room notification API."""
    # get config from config file
    config = ConfigParser.ConfigParser()
    config.readfp(open(os.path.join('..', 'default', 'hipchat.conf')))

    # update args if user specifies them in search
    room    = kwargs.get('room', config.get('default', 'room'))
    color   = kwargs.get('color', config.get('default', 'color'))
    notify  = kwargs.get('notify', config.get('default', 'notify'))
    msg_fmt = kwargs.get('message_format',
                         config.get('default', 'message_format'))

    if config.get('default', 'allow_users_set_base_url').lower() in TRUE_VALUES:
        base_url = kwargs.get('base_url', config.get('default', 'base_url'))
    else:
        base_url = config.get('default', 'base_url')

    # check if auth token is set properly
    try:
        auth_token = {"auth_token": config.get(room, 'auth_token')}
    except ConfigParser.NoSectionError as e:
        raise Exception("Room not set, please set the room stanza")
    except ConfigParser.NoOptionError as e:
        raise Exception("Auth token not set, please set auth token for room")

    # construct url
    url = base_url + "{s}{r}/notification".format(
        s='' if base_url.endswith('/') else '/', r=room)

    # read search results
    results = sis.readResults(None, None, True)

    # prepare data to be sent
    data = {
        'message': get_pretty_table(results, msg_fmt),
        'message_format': msg_fmt,
        'color': color,
        'notify': notify.lower() in TRUE_VALUES
    }

    # send data
    headers = {'Content-type': 'application/json'}
    r = requests.post(url,
        data=json.dumps(data),
        params=auth_token,
        headers=headers)

    if r.status_code == 204:
        sis.outputResults(results)
    else:
        # Fixed copy-paste error: this command posts to HipChat, not Slack.
        err_msg = ("Error sending results to hipchat, reason: {r}, {t}".format(
                    r=r.reason, t=r.text))
        sis.generateErrorResults(err_msg)
Exemple #11
0
def main():
    """Send the current search results to a Slack channel via webhook."""
    # get config from config file; prefer local/ over default/
    config = ConfigParser.ConfigParser()

    if os.path.exists(os.path.join('..', 'local', 'slack.conf')):
        config.readfp(open(os.path.join('..', 'local', 'slack.conf')))
    else:
        config.readfp(open(os.path.join('..', 'default', 'slack.conf')))

    # username and icon can only be set by conf
    username = config.get('config', 'username')
    icon = config.get('config', 'icon')

    # update args if user specifies them in search
    channel = kwargs.get('channel', config.get('config', 'channel'))
    if not channel.startswith('#'): channel = '#' + channel
    if config.get('config', 'allow_user_set_slack_url').lower() in TRUE_VALUES:
        url = kwargs.get('url', config.get('config', 'url'))
    else:
        url = config.get('config', 'url')

    # no url specified, don't proceed
    if not url:
        # Fixed message wording ("Not slack url" -> "No slack url").
        raise Exception("No slack url specified!")

    # read search results
    results = sis.readResults(None, None, True)

    https_proxy = config.get('config', 'proxy')
    proxyDict = {"https": https_proxy}

    # prepare data to be sent to slack
    data = {
        'text': get_pretty_table(results),
        'username': username,
        'channel': channel,
        'icon_url': icon,
        'mrkdwn': True,
    }

    if https_proxy != "":
        # send data to slack through the configured proxy
        r = requests.post(url, data=json.dumps(data), proxies=proxyDict)
    else:
        r = requests.post(url, data=json.dumps(data))

    if r.status_code == 200:
        sis.outputResults(results)
    else:
        err_msg = ("Error sending results to slack, reason: {r}, {t}".format(
            r=r.reason, t=r.text))
        sis.generateErrorResults(err_msg)
Exemple #12
0
def main():
    """Run an ACT fact search from splunk keywords/options and emit rows."""
    client = actconfig.setup()

    # Parse arguments from splunk search
    opts, kwargs = Intersplunk.getKeywordsAndOptions()

    # Bare keywords become the "keywords" option unless one was given
    # explicitly as keywords=... on the search line.
    if opts and "keywords" not in kwargs:
        kwargs["keywords"] = " ".join(opts)

    results = list(fact_search(client, **kwargs))
    Intersplunk.outputResults(results)
Exemple #13
0
def main():
    """Validate incoming time-series results and run anomaly detection."""
    try:
        output_fields = ['_time']
        output_results = []
        search_results, dummyresults, settings = intersplunk.getOrganizedResults()

        # Nothing to analyse: emit an empty result set and stop.
        if not search_results:
            intersplunk.outputResults(output_results, fields=output_fields)
            return

        fields = search_results[0].keys()
        is_field_valid, is_detection_needed = check_fields(fields)
        if not is_field_valid:
            intersplunk.parseError(
                'This visualization requires timestamped, evenly spaced numeric time-series data. Try using the timechart command in your query.'
            )

        # Input is already acceptable as-is: pass it straight through.
        if not is_detection_needed:
            intersplunk.outputResults(search_results,
                                      fields=search_results[0].keys())
            return

        output_results, output_fields = wrap_anomaly_detection(search_results)
        intersplunk.outputResults(output_results, fields=output_fields)
    except:
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " +
                                                   str(stack))
        intersplunk.outputResults(results)
Exemple #14
0
def return_results(module):
    """Process IOCs via *module* and output the outcome.

    module -- object exposing process_iocs(results_or_None).
    """
    # Fix: new_results was previously unbound (NameError at outputResults)
    # when results was an empty list and no extra argv was given.
    new_results = []
    try:
        results, dummy_results, settings = InterSplunk.getOrganizedResults()

        if isinstance(results, list) and len(results) > 0:
            new_results = module.process_iocs(results)
        elif len(sys.argv) > 1:
            new_results = module.process_iocs(None)
    except:
        stack = traceback.format_exc()
        new_results = InterSplunk.generateErrorResults("Error: " + str(stack))

    InterSplunk.outputResults(new_results)
    return
    def _do_handle(self):
        """Handle every incoming event and emit the collected results."""
        self.logger.info("Start of ServiceNow script")

        results = []
        for event in self._get_events():
            # A None event marks the end of the stream.
            if event is None:
                break

            handled = self._handle_event(event)
            if handled:
                handled["_time"] = time.time()
                results.append(handled)
        si.outputResults(results)

        self.logger.info("End of ServiceNow script")
Exemple #16
0
def main():
    """Annotate events with related passive-DNS queries and answers.

    For each event, the values of the user-specified fields are looked up
    in pDNS; related queries/answers other than the looked-up value itself
    are added as 'query' and 'answer' multivalue fields.
    """
    p = pdns.PDNS()

    # Parse arguments from splunk search
    opts, kwargs = Intersplunk.getKeywordsAndOptions()
    limit = int(kwargs.get("limit", 25))

    events, _, _ = Intersplunk.getOrganizedResults()

    # Annotate events
    for event in events:
        value = []
        for field in opts:
            if event.get(field):
                value.append(event[field])

        if not value:
            continue

        query = {}
        answer = {}
        for val in value:
            try:
                for res in p.query(val, limit=limit):
                    # Bug fix: compare against the looked-up string 'val';
                    # the old code compared against the list 'value', which
                    # is never equal to a string, so nothing was excluded.
                    if res["query"] != val:
                        query[res["query"]] = True
                    if res["answer"] != val:
                        answer[res["answer"]] = True
            except pdns.connectionError as e:
                Intersplunk.parseError(str(e))
                return
            except pdns.resourceLimitExceeded as e:
                Intersplunk.parseError(str(e))
                return

        # Only add the fields when they don't already exist on the event.
        if query:
            if "query" not in event:
                event["query"] = query.keys()

        if answer:
            if "answer" not in event:
                event["answer"] = answer.keys()

    Intersplunk.outputResults(events)
Exemple #17
0
def main(dist, env):
    """Set the default Python distribution and/or virtual environment."""
    pm_config, config = load_pyden_config()
    pyden_location = pm_config.get('appsettings', 'location')

    if dist:
        # A distribution must already be installed before it can be default.
        if dist not in config.sections():
            Intersplunk.generateErrorResults(
                "The Python version %s is not installed yet." % dist)
            sys.exit(1)
        write_pyden_config(pyden_location, config, "default-pys",
                           "distribution", dist)

    if env:
        # Same for virtual environments.
        if env not in config.sections():
            Intersplunk.generateErrorResults(
                "The virtual environment %s does not exist." % env)
            sys.exit(1)
        write_pyden_config(pyden_location, config, "default-pys",
                           "environment", env)

    Intersplunk.outputResults([{"message": "Successfully changed defaults"}])
Exemple #18
0
def main():
    """Refresh the URLhaus feed lookup ('feed' mode) or enrich results."""
    lookup_path = '/opt/splunk/etc/apps/osweep/lookups'
    file_path = '{}/urlhaus_url_feed.csv'.format(lookup_path)

    # "feed" mode: rewrite the CSV lookup and stop.
    if sys.argv[1].lower() == 'feed':
        data_feed = urlhaus.get_feed()
        urlhaus.write_file(data_feed, file_path)
        exit(0)

    # Defensive default so outputResults never sees an unbound name.
    new_results = []
    try:
        results, dummy_results, settings = InterSplunk.getOrganizedResults()

        if isinstance(results, list) and len(results) > 0:
            new_results = process_master(results)
        elif len(sys.argv) > 1:
            new_results = process_master(None)
    except:
        stack = traceback.format_exc()
        new_results = InterSplunk.generateErrorResults("Error: " + str(stack))

    InterSplunk.outputResults(new_results)
    return
Exemple #19
0
def main():
    """Query passive DNS for each keyword argument and emit all records."""
    p = pdns.PDNS()

    # Parse arguments from splunk search
    opts, kwargs = Intersplunk.getKeywordsAndOptions()

    # Per-query record limit, defaulting to 25 when unspecified.
    limit = int(kwargs.get("limit", 25))

    results = []
    for value in opts:
        try:
            results.extend(p.query(value, limit=limit))
        except pdns.connectionError as e:
            Intersplunk.parseError(str(e))
            return
        except pdns.resourceLimitExceeded as e:
            Intersplunk.parseError(str(e))
            return

    Intersplunk.outputResults(results)
Exemple #20
0
def run(messages, count, mapping):
    """Build a disjunctive search string from up to `count` result rows.

    mapping -- list of (renamed, attr) pairs; each row contributes a
    conjunction of its attr values (optionally emitted as renamed="value").
    Duplicate rows contribute only once. The search string is emitted as a
    single result with field 'search'.
    """
    results = si.readResults(None, None, True)

    ORS = []
    seenValues = set()  # dedup rows
    for i, result in enumerate(results):
        if count > 0 and i >= count:
            break
        ANDS = []
        for j, (renamed, attr) in enumerate(mapping):
            val = str(result.get(attr, ''))
            # Idiom fix: identity comparison with None ('is None', not '==').
            if renamed is None or renamed == '':
                if val != '':
                    ANDS.append(val)
            else:
                ANDS.append('%s="%s"' % (renamed, val))
        andstr = str(ANDS)
        if len(ANDS) > 0 and andstr not in seenValues:
            ORS.append(ANDS)
            seenValues.add(andstr)

    # Terms within a row are space-separated (implicit AND); rows are OR'd.
    output = ""
    if len(ORS) > 1:
        output += "("
    for i, OR in enumerate(ORS):
        if i > 0:
            output += ") OR ("
        for j, AND in enumerate(OR):
            if j > 0:
                output += " "  #" AND "
            output += AND
    if len(ORS) > 1:
        output += ")"

    si.outputResults([{'search': output}], messages)
Exemple #21
0
def run(messages, count, mapping):
    """Build a disjunctive search string from up to `count` result rows.

    mapping -- list of (renamed, attr) pairs; each row contributes a
    conjunction of its attr values (optionally emitted as renamed="value").
    Duplicate rows contribute only once. The search string is emitted as a
    single result with field 'search'.
    """
    results = si.readResults(None, None, True)

    ORS = []
    seenValues = set()  # dedup rows
    for i, result in enumerate(results):
        if count > 0 and i >= count:
            break
        ANDS = []
        for j, (renamed, attr) in enumerate(mapping):
            val = str(result.get(attr, ''))
            # Idiom fix: identity comparison with None ('is None', not '==').
            if renamed is None or renamed == '':
                if val != '':
                    ANDS.append(val)
            else:
                ANDS.append('%s="%s"' % (renamed, val))
        andstr = str(ANDS)
        if len(ANDS) > 0 and andstr not in seenValues:
            ORS.append(ANDS)
            seenValues.add(andstr)

    # Terms within a row are space-separated (implicit AND); rows are OR'd.
    output = ""
    if len(ORS) > 1:
        output += "("
    for i, OR in enumerate(ORS):
        if i > 0:
            output += ") OR ("
        for j, AND in enumerate(OR):
            if j > 0:
                output += " " #" AND "
            output += AND
    if len(ORS) > 1:
        output += ")"

    si.outputResults([{'search': output}], messages)
Exemple #22
0
def main():
    """Preprocess evenly spaced time-series results for outlier detection."""
    try:
        search_results, dummyresults, settings = intersplunk.getOrganizedResults()
        output_fields = ['_time', '_span']
        output_results = []
        if not search_results:
            intersplunk.outputResults(output_results, fields=output_fields)
        else:
            fields = search_results[0].keys()
            # Every column except the time bookkeeping ones is a candidate.
            detected_fields = [f for f in fields
                               if f != '_time' and f != '_span']
            search_results_length = range(len(search_results))
            timestamp = [int(str(search_results[i]['_time']))
                         for i in search_results_length]
            output_results = [{'_time': timestamp[i],
                               '_span': search_results[i]['_span']}
                              for i in search_results_length]
            for cur_field in detected_fields:
                data = [str(search_results[i][cur_field])
                        for i in search_results_length]
                # preprocess() fills output_results in place and reports
                # whether the field survived validation.
                if preprocess(data, timestamp, search_results_length,
                              output_results, cur_field):
                    output_fields.append(cur_field)

            intersplunk.outputResults(output_results, fields=output_fields)
    except:
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " +
                                                   str(stack))
        intersplunk.outputResults(results)
Exemple #23
0
            if arg_on_and_enabled(argvals, "metadata", is_bool=True):
                logger.info(
                    'arg=metadata is set to true, will return feed metadata only; will ignore all other arguments/settings'
                )
                quandl_show_info = True
                quandl_uri = "https://www.quandl.com/api/v3/datasets/%s/%s/metadata.json" % (
                    quandl_database, set)
            logger.debug('effective uri="%s"' % quandl_uri)

            create_time = True
            if arg_on_and_enabled(argvals,
                                  "convert_time",
                                  rex="^(?:f|false|0|no)$"):
                create_time = False

            set_payload = getDataPayload(quandl_uri)
            if set_payload is not "":
                quandl_data = json.loads(set_payload)
                uber += quandl2splunk(quandl_data, quandl_show_info,
                                      create_time)
        # keeping all data into single array is waste of memory; need to figure out how to call outputResults multiple times, without adding header each time
        logger.info('sending events to splunk count="%s"' % len(uber))
        si.outputResults(uber)
    except Exception, e:
        logger.error('error while processing events, exception="%s"' % e)
        si.generateErrorResults(e)
        raise Exception(e)
    finally:
        logger.info('exiting, execution duration=%s seconds' %
                    (time.time() - eStart))
        for value_field in value_fields:
            cur_field = value_field[6:]
            outlier_field = 'outlier_' + cur_field
            severity_field = 'severity_' + cur_field
            if outlier_field in search_results[i] and str(search_results[i][outlier_field]) == 'True':
                if len(str(search_results[i][severity_field])) > 0:
                    severity_value = search_results[i][severity_field]
                else:
                    severity_value = -1
                cur_row = {'Field name': cur_field, 'Value': input[i][value_field], 'Severity': severity_value}
                cur_row.update({k: input[i][k] for k in OUTPUT_ATTRIBUTE_FIELDS if k in input[i]})
                output.append(cur_row)

    sorted(output, key=comparator)
    return output


try:
    output_fields = ['_time', 'Job name', 'Field name', 'Value', 'Severity']
    output_results = []
    search_results, dummyresults, settings = intersplunk.getOrganizedResults()
    if search_results is None or len(search_results) == 0:
        intersplunk.outputResults(output_results)
    else:
        # Fix: list.extend() returns None, so fields= was always None, and
        # it also mutated OUTPUT_ATTRIBUTE_FIELDS on every invocation.
        # Also stop after the empty output above instead of emitting twice.
        output_results = parse_table(search_results)
        intersplunk.outputResults(
            output_results[:OUTPUT_COUNT],
            fields=OUTPUT_ATTRIBUTE_FIELDS + ['Field name', 'Value', 'Severity'])
except:
    stack = traceback.format_exc()
    results = intersplunk.generateErrorResults("Error : Traceback: " + str(stack))
    intersplunk.outputResults(results)
Exemple #25
0
 def _do_handle(self):
     """Run the event handler once and emit its results to Splunk."""
     self._logger.info("Start of %s", self._get_log_file())
     # _handle_events() is defined elsewhere in the class; its return value
     # is emitted verbatim as the command's result set.
     result = self._handle_events()
     si.outputResults(result)
     self._logger.info("End of %s", self._get_log_file())
Exemple #26
0
            output += AND
    if len(ORS) > 1:
        output += ")"

    si.outputResults([{'search': output}], messages)


if __name__ == '__main__':
    messages = {}
    try:
        (isgetinfo, sys.argv) = si.isGetInfo(sys.argv)
        argtext = ' '.join(sys.argv[1:])
        count, mapping = parseArgs(argtext)

        if isgetinfo:
            reqsop = True
            preop = "head %s" % count
            fields = [field for alias, field in mapping]
            if len(fields) > 0:
                preop += " | fields %s" % ', '.join(fields)
            si.outputInfo(False, False, False, reqsop,
                          preop)  # calls sys.exit()
        run(messages, count, mapping)

    except Exception, e:
        import traceback
        stack = traceback.format_exc()
        si.addErrorMessage(
            messages, "%s. Traceback: %s" % (e, stack.replace('\n', '\\n')))
        si.outputResults([], messages)
Exemple #27
0
#!/opt/splunk/bin/python
############################################################
#
# GET /agents/summary
#
############################################################
import sys
import splunk.Intersplunk as si
import requests
import json

try:
    request = requests.get(
        sys.argv[1] + "/en-US/custom/SplunkAppForWazuh/agents/summary?ip=" +
        sys.argv[2] + "&port=" + sys.argv[3] + "&user="******"&pass="******"Error : Traceback: " + str(stack))

si.outputResults(data)
Exemple #28
0
from utils import load_pyden_config
from splunk import Intersplunk
import re
from splunk_logger import setup_logging

if __name__ == "__main__":
    logger = setup_logging()
    pm_config, config = load_pyden_config()
    pyden_location = pm_config.get('appsettings', 'location')
    sections = config.sections()
    logger.debug(sections)
    # The "default-pys" stanza is bookkeeping, not an environment.
    if "default-pys" in sections:
        sections.remove("default-pys")
    # Version stanzas look like "3.7.4"; everything else is a virtualenv.
    version_pattern = re.compile(r"""\d\.\d{1,2}\.\d{1,2}""")
    venvs = [name for name in sections if not version_pattern.match(name)]
    results = [{"environment": name} for name in venvs]
    for result in results:
        result['version'] = config.get(result['environment'], 'version')
        result["is_default"] = (
            1 if result['environment'] == config.get("default-pys",
                                                     "environment") else 0)

    Intersplunk.outputResults(results)
Exemple #29
0
def main():
    """Splunk custom-command entry point for AWS price lookup and reserved
    instance (RI) recommendation.  Dispatch is on argument count:

      1 arg  ('info')                          -- refresh cached AWS price info
      4 args (region, type, os, tenancy)       -- emit a price-detail row
      6 args (base, region, type, purchase
              option, os, tenancy)             -- emit RI recommendation/costs
    """
    try:
        search_results, dummyresults, settings = intersplunk.getOrganizedResults(
        )
        session_key = settings['sessionKey']
        if len(sys.argv) == 2:
            # update aws price info
            if sys.argv[1] == 'info':
                task = AwsInfoTask(session_key)
                task.execute()
        elif len(sys.argv) == 5:
            # obtain price detail
            region = sys.argv[1]
            instance_type = sys.argv[2]
            product_os = sys.argv[3]
            tenancy = sys.argv[4]
            on_demand_hourly, reserved_one_all_yearly, reserved_one_partial_yearly, reserved_one_partial_hourly, reserved_one_no_hourly, currency = read_price(
                region, instance_type, product_os, tenancy, session_key)

            # Single result row with one column per price component.
            intersplunk.outputResults([{
                PRICE_ON_DEMAND_HOURLY: on_demand_hourly,
                PRICE_RESERVED_ONE_ALL_YEARLY: reserved_one_all_yearly,
                PRICE_RESERVED_ONE_PARTIAL_YEARLY: reserved_one_partial_yearly,
                PRICE_RESERVED_ONE_PARTIAL_HOURLY: reserved_one_partial_hourly,
                PRICE_RESERVED_ONE_NO_HOURLY: reserved_one_no_hourly,
                CURRENCY: currency
            }],
                                      fields=[
                                          PRICE_ON_DEMAND_HOURLY,
                                          PRICE_RESERVED_ONE_ALL_YEARLY,
                                          PRICE_RESERVED_ONE_PARTIAL_YEARLY,
                                          PRICE_RESERVED_ONE_PARTIAL_HOURLY,
                                          PRICE_RESERVED_ONE_NO_HOURLY,
                                          CURRENCY
                                      ])
        elif len(sys.argv) == 7:
            # calculate optimal RI, RI cost and on demand cost
            base = sys.argv[1]
            region = sys.argv[2]
            instance_type = sys.argv[3]
            purchase_option = sys.argv[4]
            product_os = sys.argv[5]
            tenancy = sys.argv[6]

            valid_days, message = get_valid_days_from_conf(session_key)
            if valid_days < 0:
                # Config rejected: emit placeholders instead of a number.
                ri = 'N/A'
                ri_cost = 'N/A'
                instance_hours = []
                on_demand_hourly = 0
                # Yen sign for China regions, dollar otherwise.
                # NOTE(review): '\xc2\xa5'.decode('utf8') is Python 2 only.
                currency = '$' if re.match(
                    r'cn-.*', region) == None else '\xc2\xa5'.decode('utf8')
            else:
                history_len, instance_hours = get_instance_hours(
                    base, search_results)
                # read price
                on_demand_hourly, reserved_one_all_yearly, reserved_one_partial_yearly, reserved_one_partial_hourly, reserved_one_no_hourly, currency = read_price(
                    region, instance_type, product_os, tenancy, session_key)

                if valid_days * HOURS_OF_DAY > history_len:
                    # Not enough usage history to recommend anything.
                    ri = 'N/A'
                    ri_cost = 'N/A'
                    message = 'It\'s required to have %d days\' data at least. You can update the setting in recommendation.conf' % (
                        valid_days)
                else:
                    # Effective hourly RI rate depends on the purchase option.
                    if purchase_option == 'all':
                        ri, ri_cost, message = ri_wrap(
                            instance_hours, on_demand_hourly,
                            reserved_one_all_yearly / HOURS_OF_YEAR)
                    elif purchase_option == 'partial':
                        ri, ri_cost, message = ri_wrap(
                            instance_hours, on_demand_hourly,
                            reserved_one_partial_yearly / HOURS_OF_YEAR +
                            reserved_one_partial_hourly)
                    else:
                        ri, ri_cost, message = ri_wrap(instance_hours,
                                                       on_demand_hourly,
                                                       reserved_one_no_hourly)

            # Guard against division by zero when there is no usage history.
            instance_hours_len = max(1, len(instance_hours))
            outputResults = []
            cur_line = {}
            # Projected yearly on-demand cost from average hourly usage.
            cur_line[ON_DEMAND_COST] = int(
                round(on_demand_hourly * sum(instance_hours) /
                      instance_hours_len * HOURS_OF_YEAR))  # on demand cost
            cur_line[RI] = ri
            cur_line[RI_COST] = 'N/A' if ri_cost == 'N/A' else int(
                round(ri_cost / instance_hours_len * HOURS_OF_YEAR))  # RI cost
            cur_line[MESSAGE] = message
            cur_line[CURRENCY] = currency
            outputResults.append(cur_line)
            intersplunk.outputResults(
                outputResults,
                fields=[RI, RI_COST, ON_DEMAND_COST, MESSAGE, CURRENCY])
        else:
            intersplunk.parseError(
                "Arguments should be recommendation base, AZ, instance type, purchase option, os and tenancy."
            )
    except:
        # Top-level boundary: report the traceback back into the search UI.
        stack = traceback.format_exc()
        results = intersplunk.generateErrorResults("Error : Traceback: " +
                                                   str(stack))
        intersplunk.outputResults(results)
# NOTE(review): `stdinArgs` is read from stdin earlier in the script (above
# this chunk); presumably it carries the XML payload Splunk hands to custom
# commands -- confirm against the full source.
stdinArgs = urllib.unquote(stdinArgs).decode('utf8')
# Extract the Splunk auth token for the REST call below.
match = re.search(r'<authToken>([^<]+)</authToken>', stdinArgs)
sessionKey = match.group(1)

# First CLI argument selects which incident's stored results to fetch.
incident_id = sys.argv[1]

# Query the alert_manager KV store collection for this incident.
query = {}
query['incident_id'] = incident_id
uri = '/servicesNS/nobody/alert_manager/storage/collections/data/incident_results?query=%s' % urllib.quote(json.dumps(query))
serverResponse, serverContent = rest.simpleRequest(uri, sessionKey=sessionKey)

data = json.loads(serverContent)
#sys.stderr.write("data: %s" % data)

# Re-emit each stored event; when a record carries an explicit field_list,
# rebuild its rows in that recorded column order.
field_list = None
results = []
for result in data:
    if "field_list" in result:
        field_list = result["field_list"]

    for line in result["fields"]:
        if type(field_list) is list:
            ordered_line = collections.OrderedDict()
            for field in field_list:
                ordered_line[field] = line[field]
            results.append(ordered_line)
        else:
            results.append(line)

intersplunk.outputResults(results)
Exemple #31
0
    for i in range(1, len(sys.argv)):
        func = py.parse_func(sys.argv[i])
        logger.debug("func = %s" % func)    
        recs = get_inputs(records, func.arguments)
        logger.debug("get_inputs = %s" % recs)    
        
        f = py.find_func(func)
        f._sessionKey_ = sessionKey
        try:
            if len(func.arguments)==0:
                rf = f()
            else:
                rf = f(*tuple(recs))
        except Exception as ex:
            logger.critical("fneval: ex = %s" % ex)
            print "%s" % ex
            rf = ex
            
        logger.debug("rf = %s" % rf)
        
        nm = func.name
        if func.alias!=None: nm = func.alias
        ret[nm] = rf

    logger.debug("ret = %s" % ret)
    logger.debug("records = %s" % records)
    

    si.outputResults([ret])
    
Exemple #32
0
        logger.debug('Ending Output')

        #if not proc_err == '':
        #    raise Exception("prestrace returned something in stderr: '%s'" % proc_err)

        lines = proc_err.split("\n")
        
        results = []

	re_time = re.compile(r'^(\d+\.\d\d\d)\d+\s+(\w+).*\<(\d+\.\d+)\>')

        for line in lines:
            if line != '':
                res = {}
                res['_raw'] = line
		m = re_time.match(line)
		if m:
  			res['_time'] = m.group(1)
  			res['call'] = m.group(2)
  			res['duration'] = m.group(3)
                results.append(res)
        logger.debug(pprint.pformat(results))
        si.outputResults(results, fields=['_raw'])
        logger.debug("exited __main__")
    except Exception, e:
        si.generateErrorResults(e)




Exemple #33
0

if __name__ == '__main__':
    # Apply each function expression given on the command line to every
    # incoming search result, merging the return value into the result.
    stdin = sys.stdin if not os.isatty(0) else None

    settings = {}
    records = si.readResults(settings=settings, has_header=True)
    sessionKey = settings['sessionKey']

    for raw_expr in sys.argv[1:]:
        logger.debug("query = %s" % raw_expr)
        func = parse_func(raw_expr)
        logger.debug("func arguments = %s" % func.arguments)

        for record in records:
            # Always forward the session key so functions can call Splunk.
            ret = run_func(func, sessionKey=sessionKey, **record)

            if isinstance(ret, dict):
                # Dict results merge field-by-field into the record.
                record.update(ret)
            else:
                # Scalar results land under the alias (or the function name).
                if func.alias != None:
                    field = func.alias
                else:
                    field = func.name
                record.update({field: ret})

    si.outputResults(records)
 
Exemple #34
0
        results = []

        now = str(int(time.mktime(time.gmtime())))
        start = 0
        # google rest api returns very few results, get many pages of a small number of results
        for loop in range(0, 20):
            try:
                # Define the query to pass to Google Search API
                query = urllib.urlencode({'q' : search, 'start' : start})
                # Fetch the results and convert to JSON
                search_results = urllib2.urlopen(GOOGLE_REST_URL % query)
                data = json.loads(search_results.read())
                hits = data['responseData']['results']
                for h in hits:
                    raw = stripCommonHTML(sax.unescape(h['content']))
                    title = stripCommonHTML(h['titleNoFormatting'])
                    url = h['unescapedUrl']
                    results.append({'title' : title , 'url' : url , '_time' : now, 'description' : raw, '_raw' : title + "... " + raw})
                start += len(hits)
                if len(results) > maxresults:
                    break
            except:
                break
        si.outputResults(results[:maxresults])
    except Exception, e:
        import traceback
        stack =  traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e, stack))


Exemple #35
0
						+"* DateTime: " + datetime\
						+"\r\n\r\n* Sensors: "+sensors\
						+"\r\n\r\n* Signatures:\r\n\r\n"+signatures\
						+"\r\n\r\n* PCAP Attached: "\
						+"\r\n\r\n**SOURCE**\r\n\r\n"\
						+"\r\n Source IP(s): "+sourceip\
						+"\r\n X-Forward-For : (Please Confirm with PCAP) "+xforward\
						+"Ports: "+sport\
						+"\r\n Hostname: "\
						+"\r\n\r\n**Destination**\r\n\r\n"\
						+"\r\n Destination IP(s): "+destip\
						+"Ports: "+dport\
						+"\r\n Hostname: "\
						+"\r\n\r\n**Add RAW data as a Nugget**\r\n\r\n",
								"plan" : "0",
								"concerned_business_lines" : [1],
								"severity": "3",
								"is_incident" : "true",
								"status" : "O",
								"subject": "IDS Event:  "
						}		
        			response = requests.post(FIR_API_URL+"incidents", headers=headers, data=json.dumps(data), verify=False)
                    outputresults.append({'fir_api' : result['dest'], 'push_status' : 'ok'})	
            	i=1  											    
        si.outputResults(outputresults)

    except Exception, e:
        import traceback
        stack =  traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e, stack))
Exemple #36
0
        # e.g., '/data/inputs/monitor'
        entity = keywords[0]
        logger.info("Entity: %s Args: %s" % (entity, args))

        results = []  # we don't care about incoming results
        try:
            entitys = en.getEntities(entity, sessionKey=sessionKey, owner=owner, namespace=namespace, count=-1)
            for name, entity in entitys.items():
                try:
                    myapp = entity["eai:acl"]["app"]
                    if namespace != None and myapp != namespace:
                        continue
                except:
                    continue  # if no eai:acl/app, filter out
                result = entityToResult(name, entity)
                results.append(result)
        except splunk.ResourceNotFound, e2:
            pass
        si.outputResults(results, messages)
    except Exception, e:
        import traceback

        stack = traceback.format_exc()
        logger.error(str(e) + ". Traceback: " + str(stack))
        si.generateErrorResults(str(e))


if __name__ == "__main__":
    # Script entry point: run the search command.
    execute()
    gpath = gpath.replace('\]', ']')
    # find all files matching
    complete_path = os.path.expanduser(
        os.path.expandvars(gpath))
    glob_matches = glob.glob(complete_path)
    logger.error("complete path: %s" % complete_path)
    logger.error("glob matches: %s" % glob_matches)

    if len(glob_matches)==0:
        logger.error("No file matching %s" % complete_path)
        raise Exception("No files matching %s." % complete_path)


    for pfile in glob_matches:
        logger.error("parsing file: %s" % pfile)
        results += parse_pstack_file(pfile, separator, fileorderindex)


    #return results
    return results

# noinspection PyUnreachableCode
if __name__ == '__main__':
    # Parse the pstack files and emit them as results; on failure, report a
    # short error to the search UI and log the full traceback.
    try:
        si.outputResults(parse_pstacks(), messages, fields)
    except Exception, e:  # Python 2 except syntax; this script targets py2
        import traceback
        stack = traceback.format_exc()
        si.generateErrorResults("Following error occurred while parsing pstack: '%s'." % (e))
        logger.error("%s. %s" % (e, stack))
Exemple #38
0
    log("MAXTERMS: %s MAKEORS: %s eventsOnly: %s" % (maxTerms, makeORs, eventsOnly))
    log("tsearch: %s" % tsearch)

    results = []
    try:
        results = findTransaction(tname, tsearch, makeORs, eventsOnly, maxTerms, messages, **kwargs)
    except Exception, e:
        error(e)

    events = []
    log("RESULTS: %s" % len(results))
    for result in results:  # api fail
        event = {}
        for field in result:
            if field == '_time':
                event['_time'] = util.dt2epoch(util.parseISO(str(result['_time'])))
            else:
                event[field] = result[field]
        events.append(event)

    si.outputResults(events, messages)


if __name__ == '__main__':
    # Entry point: run the command, routing any failure through error().
    try:
        main()
    except Exception, e:  # Python 2 except syntax
        error(e)
        
        if len(regexes) == 0:
            si.generateErrorResults(
                'Unable to learn any extractions.  Provide different examples, counterexamples, or searchresults'
            )
            exit(0)

        rex = regexes[0]

        rex = rex.replace("?P<FIELDNAME>", "?P<%s>" % keywords[0])
        si.addInfoMessage(
            messages,
            'Successfully learned regex.  Consider using: | rex "%s"' %
            rex.replace('"', '\\"'))

        # for each result
        for result in results:
            val = result.get(fromfield, None)
            # match regex and put values in
            match = re.search(rex, val)
            if match:
                extractions = match.groupdict()
                for k, v in extractions.items():
                    result[k] = v

        si.outputResults(results, messages)
    except Exception, e:
        stack = traceback.format_exc()
        si.generateErrorResults("Error '%s'" % e)
        logger.error("Exception encountered: %s" % e)
        logger.info("Traceback: %s" % stack)
        "Usage: indextimerange <earliest_time_epoch> <latest_time_epoch>")

# Build a disjunction of _indextime prefix terms covering [earliest, latest).
#
# Expects two integer epoch arguments (earliest, latest).  Emits one result
# whose 'search' field is an OR of _indextime=<prefix>* terms that together
# match exactly the epochs in the half-open range [a, b).

a = 0
b = 0
try:
    a = int(sys.argv[1])
    b = int(sys.argv[2])
    if b < a:
        raise ValueError
except (IndexError, ValueError):
    # Missing argument, non-integer argument, or latest < earliest.
    isp.parseError("Invalid earliest and/or latest epoch times")

disjuncts = []

while a < b:
    # Find the largest power of ten 'level' such that the decimally-aligned
    # block [a, a + level) still fits inside [a, b).
    level = 10
    while a % level == 0 and (a + level) <= b:
        level = level * 10
    # Floor division keeps 'level' an int under Python 3 (plain '/' would
    # yield a float, corrupting the modulo test and the %d format below).
    level = level // 10

    # One wildcard term covers the whole block, e.g. a=173000, level=1000
    # becomes _indextime=173*
    disjuncts.append('_indextime=%d%s' % (a // level, '*' if level > 1 else ''))

    a = a + level

results = [{'search': " OR ".join(disjuncts)}]

isp.outputResults(results)
Exemple #41
0
def main():
    """Splunk-facing wrapper around duplicity: handles the custom
    'splunk-last-backups' and 'splunk-file-list' subcommands itself, and
    forwards anything else to duplicity's normal command line processing.
    Results (or captured log output) are emitted as search results.
    """
    # Captured duplicity log lines; emitted as a single _raw result when the
    # invoked action produced no structured results.
    output = []

    def Log(s, verb_level, code=1, extra=None, force_print=False):
        # Replacement for duplicity's log sink: buffer lines instead of
        # printing, respecting the configured verbosity.
        if verb_level <= log.getverbosity():
            output.extend(s.split("\n"))

    # def PrintCollectionStatus(col_stats, force_print=False):
    #     # raise ValueError(type(col_stats.matched_chain_pair[1]))
    #     output.append({
    #         "num_backup_sets":
    #     })

    # log.PrintCollectionStatus = PrintCollectionStatus

    results = None
    try:
        settings = dict()
        Intersplunk.readResults(None, settings, True)

        dup_time.setcurtime()

        # Local archive (signature cache) directory inside the app.
        archive_dir = os.path.join(app_dir, "local", "data", "archive")

        # Best-effort create; it usually already exists.
        try:
            os.makedirs(archive_dir)
        except:
            pass

        if sys.argv[1] == "splunk-last-backups":
            # Report the times of the last full and incremental backups.
            ap = argparse.ArgumentParser()
            ap.add_argument("--time", type=int)
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])

            # GPG passphrase is passed via the environment, never argv.
            dup_globals.gpg_profile = gpg.GPGProfile()
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]

            backend.import_backends()

            dup_globals.backend = backend.get_backend(args.backend)

            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)

            commandline.set_archive_dir(archive_dir)

            results = []
            # NOTE(review): 'time' shadows the stdlib module name locally.
            time = args.time
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()

            try:
                sig_chain = col_stats.get_backup_chain_at_time(time)
            except dup_collections.CollectionsError:
                # No backups yet: report epoch zero for both.
                results.append({
                    "last_full_backup_time": 0,
                    "last_incr_backup_time": 0,
                })
            else:
                if sig_chain.incset_list:
                    last_incr_backup_time = max(
                        [incset.end_time for incset in sig_chain.incset_list])
                else:
                    last_incr_backup_time = 0

                results.append({
                    "last_full_backup_time":
                    col_stats.get_last_full_backup_time(),
                    "last_incr_backup_time":
                    last_incr_backup_time
                })
        elif sys.argv[1] == "splunk-file-list":
            # List files present in the backup chain at the given time.
            ap = argparse.ArgumentParser()
            ap.add_argument("--time")
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])
            # --time may arrive as "epoch.fraction"; keep the integer part.
            args.time = int(args.time.split(".")[0])

            dup_time.setcurtime(args.time)
            dup_globals.restore_time = args.time

            dup_globals.gpg_profile = gpg.GPGProfile()
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]

            backend.import_backends()

            dup_globals.backend = backend.get_backend(args.backend)

            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)

            commandline.set_archive_dir(archive_dir)

            results = []
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()

            time = args.time
            sig_chain = col_stats.get_signature_chain_at_time(time)

            path_iter = diffdir.get_combined_path_iter(
                sig_chain.get_fileobjs(time))
            for path in path_iter:
                if path.difftype != u"deleted" and path.index:
                    # Render the permission bits as an ls-style rwx string.
                    mode = bin(path.mode)[2:]

                    perms = ""
                    # NOTE(review): assumes exactly 9 permission bits; modes
                    # with setuid/sticky bits would misrender -- confirm.
                    for p, val in enumerate(mode):
                        if p in (0, 3, 6):
                            c = "r"
                        elif p in (1, 4, 7):
                            c = "w"
                        elif p in (2, 5, 8):
                            c = "x"

                        perms += c if int(val) else "-"

                    # Leading type character: directory, symlink, or file.
                    if path.type == "dir":
                        perms = "d" + perms
                    elif path.type == "sym":
                        perms = "l" + perms
                    else:
                        perms = "-" + perms

                    results.append({
                        "perms": perms,
                        "owner": path.stat.st_uid,
                        "group": path.stat.st_gid,
                        "size": path.stat.st_size,
                        "modtime": path.stat.st_mtime,
                        "filename": os.path.join(*path.index),
                    })
        else:
            # Any other subcommand: hand the argv straight to duplicity,
            # capturing its log output through our Log replacement.
            args = ["--archive-dir", archive_dir] + sys.argv[1:]
            action = commandline.ProcessCommandLine(args)

            log.Log = Log
            try:
                dup_main.do_backup(action)
            except dup_collections.CollectionsError:
                results = []
    except SystemExit:
        # argparse/duplicity exit paths: fall through and emit what we have.
        pass
    except Exception as e:
        import traceback
        # sys.stderr.write(traceback.format_exc())

        Intersplunk.generateErrorResults("Traceback: %s" %
                                         traceback.format_exc())

        return

    # No structured results but captured log text: emit it as one raw event.
    if output and not results:
        import time

        results = [{"_raw": "\n".join(output), "_time": time.time()}]

    if results:
        try:
            Intersplunk.outputResults(results)
        except Exception:
            import traceback
            sys.stderr.write(traceback.format_exc())
            results = Intersplunk.generateErrorResults("Traceback: %s" %
                                                       traceback.format_exc())
            Intersplunk.outputResults(results)
Exemple #42
0
def main():
    """Find transaction events of the named type and emit them as results.

    argv[1] is the transaction type; the remaining args (minus recognized
    option tokens) form the transaction search string.
    """
    if len(sys.argv) < 3:
        usage()

    tname = sys.argv[1]

    # Strip recognized option tokens from the trailing args; what remains
    # is the transaction search.
    option_prefixes = ["max_terms", "use_disjunct", "eventsonly"]
    log("ARGS: %s" % sys.argv[2:])
    srchargs = [
        arg for arg in sys.argv[2:]
        if not any(arg.startswith(prefix) for prefix in option_prefixes)
    ]
    if len(srchargs) == 0:
        usage()

    tsearch = ' '.join(srchargs)
    log("SEARCH: %s" % tsearch)

    results, dummyresults, settings = si.getOrganizedResults()
    results = []  # incoming results are ignored

    ########TEST#####################
    # Fallback credentials for running outside of a live search.
    if 'sessionKey' not in settings:
        settings['owner'] = 'admin'
        settings['password'] = '******'
        settings['namespace'] = 'search'
        settings['sessionKey'] = splunk.auth.getSessionKey('admin', 'changeme')
    ########TEST####################
    # Forward only the connection-related settings to findTransaction.
    kwargs = {
        key: settings[key]
        for key in ['owner', 'namespace', 'sessionKey', 'hostPath']
        if key in settings
    }

    messages = {}
    # Clamp max_terms to [1, MAX_SEARCH_COMPLEXITY], warning on bad values.
    try:
        maxTerms = int(settings.get("max_terms", MAX_SEARCH_COMPLEXITY))
        if maxTerms > MAX_SEARCH_COMPLEXITY or maxTerms < 1:
            si.addWarnMessage(
                messages,
                "max_terms must be between 1 and %s.  Using default." %
                MAX_SEARCH_COMPLEXITY)
            maxTerms = MAX_SEARCH_COMPLEXITY
    except Exception:
        maxTerms = MAX_SEARCH_COMPLEXITY

    dummy, cmd_options = si.getKeywordsAndOptions()
    makeORs = isTrue(cmd_options.get("use_disjunct", "t"))
    eventsOnly = isTrue(cmd_options.get("eventsonly", "f"))

    log("MAXTERMS: %s MAKEORS: %s eventsOnly: %s" %
        (maxTerms, makeORs, eventsOnly))
    log("tsearch: %s" % tsearch)

    results = []
    try:
        results = findTransaction(tname, tsearch, makeORs, eventsOnly,
                                  maxTerms, messages, **kwargs)
    except Exception as e:
        error(e)

    log("RESULTS: %s" % len(results))
    events = []
    for result in results:  # api fail
        event = {}
        for field in result:
            if field == '_time':
                # Normalize ISO timestamps back to epoch seconds.
                event['_time'] = util.dt2epoch(
                    util.parseISO(str(result['_time'])))
            else:
                event[field] = result[field]
        events.append(event)

    si.outputResults(events, messages)
Exemple #43
0
               if '__mv_' + k not in header:
                  header.append('__mv_' + k)
            elif len(v) == 1:
               row[k] = v[0].text

         # Add a _time field by converting updated into a timestamp. This is helpful if you're piping results to collect.
         # if 'updated' in keys:
            #updated = re.sub(r' (\+|-)\d+$', '', elem.findtext('updated')) 
            #timestamp = time.mktime(datetime.datetime.strptime(updated, "%a, %d %b %Y %H:%M:%S").timetuple())
            #row['_time'] = timestamp

         row['host'] = hostname
         row['index'] = 'jira'
         row['source'] = 'jql'
         row['sourcetype'] = 'jira'
         row['_raw'] = row
         row['_time'] = int(time.time())

         results.append(row)

      if added_count > 0:
         offset = offset + added_count

      if added_count < count:
         break

   isp.outputResults(results, None, header)
 
except Exception, e:
   logger.exception(str(e))
   isp.generateErrorResults(str(e))
Exemple #44
0
                output += " " #" AND "
            output += AND
    if len(ORS) > 1:
        output += ")"

    si.outputResults([{'search': output}], messages)


if __name__ == '__main__':
    # Entry point for the custom search command.
    messages = {}
    try:
        # isGetInfo() strips the getinfo marker argument when Splunk probes
        # the command for its capabilities.
        (isgetinfo, sys.argv) = si.isGetInfo(sys.argv)
        argtext = ' '.join(sys.argv[1:])
        count, mapping = parseArgs(argtext)

        if isgetinfo:
            # Advertise a pre-streaming pipeline ("head N | fields ...") so
            # Splunk only forwards the events this command needs.
            reqsop = True
            preop = "head %s" % count
            fields = [field for alias, field in mapping]
            if len(fields) > 0:
                preop += " | fields %s" % ', '.join(fields)
            si.outputInfo(False, False, False, reqsop, preop)  # calls sys.exit()
        run(messages, count, mapping)

    except Exception, e:  # Python 2 except syntax
        import traceback
        stack =  traceback.format_exc()
        si.addErrorMessage(messages, "%s. Traceback: %s" % (e, stack.replace('\n','\\n')))
        si.outputResults([], messages)

Exemple #45
0
def jpath():
    """Evaluate a JMESPath expression against a JSON field of each result.

    Reads the expression from the single keyword argument; options select
    the input field (default _raw), the output field (default jpath), and
    an optional default value for results that yield nothing.
    """
    try:
        keywords, options = si.getKeywordsAndOptions()
        legacy_args_fixer(options)

        default_value = options.get('default', None)
        input_field = options.get('input', options.get('field', '_raw'))
        output_field = options.get('output', 'jpath')
        if len(keywords) != 1:
            si.generateErrorResults('Requires exactly one path argument.')
            sys.exit(0)
        path = keywords[0]

        # Un-escape literal quotes (raw args arrive with escaped quotes).
        path = path.replace(r'\"', '"')

        # A wildcard output name fans values out across multiple fields.
        apply_output = output_to_wildcard if "*" in output_field else output_to_field

        try:
            compiled_path = jmespath.compile(path)
        except ParseError as e:
            # Splunk renders multi-line errors poorly, so keep this short.
            si.generateErrorResults(
                "Invalid JMESPath expression '{}'. {}".format(path, e))
            sys.exit(0)

        results, dummyresults, settings = si.getOrganizedResults()
        for result in results:
            raw_json = result.get(input_field, None)
            added = False
            if raw_json is not None:
                if isinstance(raw_json, (list, tuple)):
                    # XXX: proper multivalue input support pending; for now
                    # only the first value is considered.
                    raw_json = raw_json[0]
                try:
                    json_obj = json.loads(raw_json)
                except ValueError:
                    # Not valid JSON: leave this result untouched.
                    continue
                try:
                    values = compiled_path.search(json_obj, options=jp_options)
                    apply_output(values, output_field, result)
                    result[ERROR_FIELD] = None
                    added = True
                except UnknownFunctionError as e:
                    # Unknown function names only surface at search time, but
                    # treat them like syntax errors: stop immediately.
                    si.generateErrorResults(
                        "Issue with JMESPath expression. {}".format(e))
                    sys.exit(0)
                except JMESPathError as e:
                    # Per-result evaluation error: record it and continue.
                    result[ERROR_FIELD] = "JMESPath error: {}".format(e)
                except Exception as e:
                    result[ERROR_FIELD] = "Exception: {}".format(e)

            if not added and default_value is not None:
                result[output_field] = default_value

        si.outputResults(results)
    except Exception as e:
        import traceback

        stack = traceback.format_exc()
        si.generateErrorResults("Error '%s'. %s" % (e, stack))
Exemple #46
0
   if sys.argv[1] == 'filters':
      filters =  client.service.getFavouriteFilters(auth)

      keys = (('author', None), ('id', None), ('name', None))

      results = []
      for filter in filters:
         row = jiracommon.flatten(filter, keys)
         logger.info(time.time())
         row['host'] = hostname
         row['source'] = "jira_soap"
         row['sourcetype'] = "jira_filters"
         row['_time'] = int(time.time())
         results.append(row)
      isp.outputResults(results)
      sys.exit(0)

   elif sys.argv[1] == 'issues':
      filter_id = sys.argv[-1]
      issues = client.service.getIssuesFromFilter(auth, filter_id)
   # TODO this 1000 issue max isn't working as expected - if there are more than 1000 results, no results are returned
   elif sys.argv[1] == 'search':
      search = sys.argv[-1]
      issues = (client.service.getIssuesFromTextSearch(auth, search, 1000) )
   elif sys.argv[1] == 'jqlsearch':
      jql = sys.argv[-1]
      issues = (client.service.getIssuesFromJqlSearch(auth, jql, 1000) )
   else:
      logger.fatal('invalid command')
      sys.exit(1)
Exemple #47
0
            for _t in threads:
                _t.join()
            _iops = sum(results)

            bandwidth = int(blocksize*_iops)
            #print " %sB blocks: %6.1f IO/s, %sB/s (%sbit/s)" % (greek(blocksize), _iops, greek(bandwidth, 1), greek(8*bandwidth, 1, 'si'))
	    #print strftime("%Y-%m-%d %H:%M:%S") + " location=%s, capacity=%s, threads=%d, block_size=%s, iops=%s" % (dev, mediasize(dev), num_threads, blocksize, _iops)
            #blocksize *= 2
	    runs-=1
	    now = str(int(time.mktime(time.localtime())))
	    def hello(results, settings):
	    	result = {}
	    	#result['string'] = strftime("%Y-%m-%d %H:%M:%S") + " location=%s, storage_type=%s, file_size_kb=%s, threads=%d, block_size=%s, iops=%s" % (dev, storage_type, file_size_kb, num_threads, blocksize, _iops)
		#results.append({'_time' : now, 'location' : dev, 'run_time_sec' : t, 'storage_type' : storage_type, 'file_size_kb' : file_size_kb, 'threads' : num_threads, 'block_size' : blocksize, 'iops' : _iops})
		results.append({'_time' : now, 'location' : dev, 'run_time_sec' : t, 'threads' : num_threads, 'block_size' : blocksize, 'iops' : _iops})
	    	return results

	    results, dummyresults, settings = si.getOrganizedResults()
	    results = hello(results, settings)
	    si.outputResults(results)

    except IOError, (err_no, err_str):
        raise SystemExit(err_str)
    except KeyboardInterrupt:
        print "caught ctrl-c, bye."

# eof.


Exemple #48
0
#
#f.close()
#
#############################

# Build the helper-script argument string: the operation name, the table
# name, then each rowkey component, space-separated.
args = 'select_list ' + tableName + ' '

for arg in rowkey:
    args = args + arg + ' '

# SECURITY NOTE(review): shell=True with string concatenation -- tableName
# and rowkey are interpolated into a shell command line unescaped.  Prefer
# subprocess.Popen(['./etu_hq.sh', ...], shell=False) with an argument list.
process = subprocess.Popen('./etu_hq.sh ' + args, shell=True, stdout=subprocess.PIPE)

#f = open('/root/result','w')

# Stream the helper's stdout; each non-empty line is a JSON document that is
# emitted as a batch of results.  EOF yields '' and ends the loop.
while(True):

    line = process.stdout.readline()

    if line:

        tmp = []
        tmp.append(json.loads(line))

#        f.write(str(tmp))

        # NOTE(review): the line is parsed twice and `tmp` is unused; also,
        # outputResults() is invoked once per line rather than once overall.
        isp.outputResults(json.loads(line))
    else:
        break

#f.close()
Exemple #49
0
            raise Exception("prebtool returned something in stderr: '%s'" % proc_err)

        host = socket.gethostname()
        stanza = ''
        lines = proc_out.split("\n")

        re_index = re.compile("^(.*).conf\s+\[(.*)\]")
        re_kv = re.compile("^(.*)\s+(\S+)\s+=\s+(.*)")
        
        results = []
        for line in lines:
            if line != '':
                res = {}
                index = re_index.match(line.strip())
                kv = re_kv.match(line.strip())
                if index:
                    stanza = index.group(2)
                elif kv:
                    res['host'] = host
                    res['conf'] = conf
                    res['file'] = kv.group(1)
                    res['stanza'] = stanza
                    res['key'] = kv.group(2)
                    res['value'] = kv.group(3)
                    results.append(res)
        logger.debug(pprint.pformat(results))
        si.outputResults(results, fields=['host','conf','file','stanza','key','value'])
        logger.debug("exited __main__")
    except Exception, e:
        si.generateErrorResults(e)
Exemple #50
0

if __name__ == '__main__':

    # Pull the connection settings from the node's puppet configuration.
    key_path  = puppet_config('hostprivkey')
    cert_path = puppet_config('hostcert')
    master    = puppet_config('inventory_server')
    port      = puppet_config('inventory_port')

    # Authenticate to the puppet master using the node's own client cert/key.
    connection = httplib.HTTPSConnection(master,
                            port,
                            key_file=key_path,
                            cert_file=cert_path)

    connection.request('GET', '/production/facts_search/search', None, {'Accept': 'pson'})

    response = connection.getresponse()

    if response.status != 200:
        si.generateErrorResults("Error: Status '%d', Reason '%s'" % (response.status, response.reason))
    else:
        # The inventory endpoint returns a JSON list of fully-qualified
        # hostnames; emit one Splunk result per node with both the fqdn
        # and its short host name.
        node_names = json.loads(response.read())
        node_names.sort()

        rows = [{'fqdn': name, 'host': name.split('.')[0]} for name in node_names]

        si.outputResults(rows)
Exemple #51
0
                reltime = "unknown"
            elif utc == None:
                reltime = "unknown"
            else:
                diff = int(now - float(utc))
                if diff < -60:
                    reltime = "future"
                elif diff < 0: # handle weird case of client clock off slightly
                    reltime = "now"
                elif diff == 0:
                    reltime = "now"
                elif diff < MINUTE:
                    reltime = unitval("second", diff)
                elif diff < HOUR:
                    reltime = unitval("minute", diff / MINUTE)
                elif diff < DAY:
                    reltime = unitval("hour", diff / HOUR)
                elif diff < MONTH:
                    reltime = unitval("day", diff / DAY)
                elif diff < YEAR:
                    reltime = unitval("month", diff / MONTH)
                else:
                    reltime = unitval("year", diff / YEAR)
            result['reltime'] = reltime
        si.outputResults(results)

    except Exception as e:
        import traceback
        stack =  traceback.format_exc()
        si.generateErrorResults("Error '%s'" % e)
    logger.error("gpath = %s" % gpath)
    # find all files matching
    complete_path = os.path.expanduser(
        os.path.expandvars(gpath))
    glob_matches = glob.glob(complete_path)
    logger.debug("complete path: %s" % complete_path)
    logger.debug("glob matches: %s" % glob_matches)

    if len(glob_matches)==0:
        logger.error("No file matching %s" % complete_path)
        raise Exception("No files matching %s." % complete_path)


    for pfile in glob_matches:
        logger.error("parsing file: %s" % pfile)
        results += parse_raw_pstack(pfile, thread_id, reverse, separator, fileorderindex)


    #return results
    return results

# noinspection PyUnreachableCode
if __name__ == '__main__':
    try:
        si.outputResults(raw_pstack(), messages, fields)
    except Exception, e:
        import traceback
        stack = traceback.format_exc()
        si.generateErrorResults("Following error occurred while parsing pstack: '%s'." % (e))
        logger.error("%s. %s" % (e, stack))