Example #2
import configparser

from sumologic import SumoLogic


def pingCollectors():
    # Read the Sumo Logic access ID and access key from config.properties
    config = configparser.RawConfigParser()
    config.read('config.properties')

    accessID = config.get("Config", "accessID")
    accessKey = config.get("Config", "accessKey")

    # List every collector and report the ones that are not alive
    sumo = SumoLogic(accessID, accessKey)
    collectors = sumo.collectors()
    outputMsg = "The following collectors are offline:\n"
    offline = False
    for collector in collectors:
        if not collector.get('alive', False):
            offline = True
            outputMsg += collector.get('name', '') + "\n"

    if offline:
        return outputMsg
    return ''
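# For reference, the config.properties file read by pingCollectors() would look
# roughly like the sketch below (the section and key names come from the code
# above; the values are placeholders, not real credentials):
#
#   [Config]
#   accessID = <your Sumo Logic access ID>
#   accessKey = <your Sumo Logic access key>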
class SumoResource(object):
    def __init__(self, props, *args, **kwargs):
        access_id, access_key, deployment = props["SumoAccessID"], props[
            "SumoAccessKey"], props["SumoDeployment"]
        self.deployment = deployment
        self.sumologic_cli = SumoLogic(access_id, access_key,
                                       self.api_endpoint)

    @abstractmethod
    def create(self, *args, **kwargs):
        pass

    @abstractmethod
    def update(self, *args, **kwargs):
        pass

    @abstractmethod
    def delete(self, *args, **kwargs):
        pass

    @abstractmethod
    def extract_params(self, event):
        pass

    @property
    def api_endpoint(self):
        if self.deployment == "us1":
            return "https://api.sumologic.com/api"
        elif self.deployment in [
                "ca", "au", "de", "eu", "jp", "us2", "fed", "in"
        ]:
            return "https://api.%s.sumologic.com/api" % self.deployment
        else:
            return 'https://%s-api.sumologic.net/api' % self.deployment

    def is_enterprise_or_trial_account(self):
        to_time = int(time.time()) * 1000
        from_time = to_time - 5 * 60 * 1000
        try:
            search_query = '''guardduty*
                | "IAMUser" as targetresource
                | "2" as sev
                | "UserPermissions" as threatName
                | "Recon" as threatPurpose
                | benchmark percentage as global_percent from guardduty on threatpurpose=threatPurpose, threatname=threatName, severity=sev, resource=targetresource'''
            response = self.sumologic_cli.search_job(search_query,
                                                     fromTime=from_time,
                                                     toTime=to_time)
            print("schedule job status: %s" % response)
            response = self.sumologic_cli.search_job_status(response)
            print("job status: %s" % response)
            if len(response.get("pendingErrors", [])) > 0:
                return False
            else:
                return True
        except Exception as e:
            if hasattr(e, "response") and e.response.status_code == 403:
                return False
            else:
                raise e
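# The abstract base above is meant to be subclassed once per managed object type.
# A minimal, hypothetical subclass sketch follows: it assumes the module already
# has `from abc import abstractmethod`, `import time`, and `from sumologic import
# SumoLogic`, and the "CollectorId"/"SourceName" property names, the event layout,
# and the HTTP source fields are illustrative rather than taken from the original
# project.
class HTTPSourceResource(SumoResource):

    def extract_params(self, event):
        # Pull the inputs this resource needs out of the incoming event payload.
        props = event.get("ResourceProperties", {})
        return {"collector_id": props.get("CollectorId"),
                "source_name": props.get("SourceName")}

    def create(self, collector_id, source_name, *args, **kwargs):
        # create_source() is used the same way elsewhere in this collection:
        # the API expects the source definition wrapped in a "source" key.
        return self.sumologic_cli.create_source(
            collector_id,
            {"source": {"name": source_name,
                        "sourceType": "HTTP",
                        "messagePerRequest": False}})

    def update(self, *args, **kwargs):
        pass

    def delete(self, *args, **kwargs):
        pass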
Example #5
def getCollectors():
    error = None

    try:
        apiid = bleach.clean(request.json["apiid"])
        apikey = bleach.clean(request.json["apikey"])
    except Exception as error:
        return render_template("index.html", error="Problem with your API keys")

    sumo = SumoLogic(apiid, apikey)
    data = sumo.collectors()

    return jsonify(results=data)
Example #6
class Resource(object):
    def __init__(self, access_id, access_key, deployment):
        self.deployment = deployment
        self.sumologic_cli = SumoLogic(access_id, access_key,
                                       self.api_endpoint)

    @abstractmethod
    def create(self, *args, **kwargs):
        pass

    @abstractmethod
    def update(self, *args, **kwargs):
        pass

    @abstractmethod
    def delete(self, *args, **kwargs):
        pass

    @abstractmethod
    def extract_params(self, event):
        pass

    @property
    def api_endpoint(self):
        if self.deployment == "us1":
            return "https://api.sumologic.com/api"
        elif self.deployment in ["ca", "au", "de", "eu", "jp", "us2"]:
            return "https://api.%s.sumologic.com/api" % self.deployment
        else:
            return 'https://%s-api.sumologic.net/api' % self.deployment

    def is_enterprise_or_trial_account(self):
        to_time = int(time.time()) * 1000
        from_time = to_time - 5 * 60 * 1000
        try:
            response = self.sumologic_cli.search_job("benchmarkcat guardduty",
                                                     fromTime=from_time,
                                                     toTime=to_time)
            print("schedule job status: %s" % response)
            response = self.sumologic_cli.search_job_status(response)
            print("job status: %s" % response)
            if len(response.get("pendingErrors", [])) > 0:
                return False
            else:
                return True
        except Exception as e:
            if hasattr(e, "response") and e.response.status_code == 403:
                return False
            else:
                raise e
 def post_to_http_source(self, joined_results):
     """
     Posts joined_results to a Sumo Logic HTTP source (hosted collector).
     Input:
         joined_results (List)            : query_results with abuseIPDB results joined into each dictionary
     Return (None):
         Posts joined_results to the specified collector's HTTP source
     """
     if self.postAuthorization is True:
         sumo_api_post = SumoLogic(
             self.sumo_access_id,
             self.sumo_access_key,
             endpoint=
             "https://endpoint1.collection.us2.sumologic.com/receiver/v1/http/"
         )
         post_object = sumo_api_post.post(self.uniqueHTTPCollectorCode,
                                          joined_results)
         print('\n')
         print(post_object)
     else:
         print("\nPost authorization disabled.\n")
Example #8
def get_sumologic_fields(config, q, from_time, to_time, time_zone):
    fields = []

    sumologic_access_id = config['sumologic_access_id']
    sumologic_access_key = config['sumologic_access_key']
    sumologic_root_url = config['sumologic_root_url']

    LOGGER.info("Run query in sumologic")
    sumo = SumoLogic(sumologic_access_id, sumologic_access_key, sumologic_root_url)

    delay = 5
    search_job = sumo.search_job(q, from_time, to_time, time_zone)

    status = sumo.search_job_status(search_job)
    while status['state'] != 'DONE GATHERING RESULTS':
        if status['state'] == 'CANCELLED':
            break
        time.sleep(delay)
        status = sumo.search_job_status(search_job)

    LOGGER.info(status['state'])

    if status['state'] == 'DONE GATHERING RESULTS':
        response = sumo.search_job_records(search_job, limit=1)

        fields = response['fields']

    return fields
Example #9
def addCollector():

    data = request.json
    sumo = SumoLogic(data["apiid"], data["apikey"])
    response = {"errors": [], "success": []}

    params = {}
    remove = ("apiid", "apikey", "collectors", "selected")
    for key in data.keys():
        if key not in remove:
            params[key] = data[key]

    # Convert boolean string to booleans
    payload = {"source": fixBooleans(params)}

    for collector in data["collectors"]:
        endpoint = "/collectors/%s/sources" % collector["id"]
        response = sumo.post(endpoint, payload)
        sleep(0.15)

    # TODO: actually return useful information
    return jsonify(results=response)
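# For reference, a hypothetical JSON body for the addCollector() endpoint above.
# The "apiid", "apikey", and "collectors" keys mirror what the handler reads; every
# remaining key is passed through as a field of the new source, so the exact field
# set depends on the source type being created:
#
#   {
#       "apiid": "<accessId>",
#       "apikey": "<accessKey>",
#       "collectors": [{"id": 101234567}, {"id": 101234568}],
#       "name": "my-new-source",
#       "sourceType": "HTTP",
#       "category": "prod/web"
#   }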
Example #10
 def __init__(self, config):
     super(BaseAction, self).__init__(config)
     access_id = self.config.get('sumologic_access_id', None)
     access_key = self.config.get('sumologic_access_key', None)
     sumologic_collectors_limit = self.config.get(
         'sumologic_collectors_limit', 1000)
     self._sumologic_access_key = access_key or None
     self._sumologic_access_id = access_id or None
     self._sumologic_collectors_limit = sumologic_collectors_limit or 1000
     if self._sumologic_access_key is None or self._sumologic_access_id is None:
         self._client = None
     else:
         self._client = SumoLogic(self._sumologic_access_id,
                                  self._sumologic_access_key)
class APIClient:
    def __init__(self, connection, configuration):
        self.endpoint = (PROTOCOL + URL +
                         ENDPOINT) % (connection.get("region"))
        self.auth = configuration.get('auth')
        self.client = SumoLogic(self.auth.get("access_id"),
                                self.auth.get("access_key"),
                                endpoint=self.endpoint)

    def ping_data_source(self):
        # Pings the data source
        self.client.collectors()
        return SumoLogicResponse(200, True)

    def create_search(self, query_expression):
        # Queries the data source
        query_expression = json.loads(query_expression)
        search_job = self.client.search_job(query_expression['query'],
                                            query_expression['fromTime'],
                                            query_expression['toTime'])
        return SumoLogicResponse(200, search_job['id'])

    def get_search_status(self, search_id):
        # Check the current status of the search
        search_id = {"id": search_id}
        status = self.client.search_job_status(search_id)
        return SumoLogicResponse(200, status['state'])

    def get_search_results(self, search_id, offset, limit):
        # Return the search results. Results must be in JSON format before being translated into STIX
        search_id = {"id": search_id}
        result = self.client.search_job_messages(search_id, limit, offset)

        response = (self.client.get("/users"))
        user_details = response.json()["data"][0]

        results = [r['map'] for r in result['messages']]
        for r in results:
            r.update(user_details)
            r["displayName"] = user_details["firstName"] + " " + user_details[
                "lastName"]

        return SumoLogicResponse(200, results)

    def delete_search(self, search_id):
        # Optional since this may not be supported by the data source API
        # Delete the search
        search_id = {"id": search_id}
        x = self.client.delete_search_job(search_id)
        return SumoLogicResponse(200, x.json())
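# A rough end-to-end usage sketch for the APIClient above. SumoLogicResponse,
# PROTOCOL, URL, and ENDPOINT come from the surrounding module and are not shown
# here; reading the response payload through a `.data` attribute is an assumption
# made for illustration, not a documented interface.

import json
import time


def run_query(client, query, from_time, to_time):
    # Submit the search, poll until it finishes (or is cancelled), then page results.
    expression = json.dumps({"query": query,
                             "fromTime": from_time,
                             "toTime": to_time})
    search_id = client.create_search(expression).data  # assumed payload accessor

    state = client.get_search_status(search_id).data
    while state not in ("DONE GATHERING RESULTS", "CANCELLED"):
        time.sleep(5)
        state = client.get_search_status(search_id).data

    return client.get_search_results(search_id, offset=0, limit=100)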
Example #12
def get_sumologic_records(config, q, from_time, to_time, time_zone, limit):
    records = []

    sumologic_access_id = config['sumologic_access_id']
    sumologic_access_key = config['sumologic_access_key']
    sumologic_root_url = config['sumologic_root_url']

    LOGGER.info("Run query in sumologic")
    sumo = SumoLogic(sumologic_access_id, sumologic_access_key, sumologic_root_url)

    now_datetime = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
    custom_columns = {
        '_SDC_EXTRACTED_AT': now_datetime,
        '_SDC_BATCHED_AT': now_datetime,
        '_SDC_DELETED_AT': None
    }
    delay = 5
    search_job = sumo.search_job(q, from_time, to_time, time_zone)
    LOGGER.info(search_job)

    status = sumo.search_job_status(search_job)
    while status['state'] != 'DONE GATHERING RESULTS':
        if status['state'] == 'CANCELLED':
            break
        time.sleep(delay)
        LOGGER.info(":check query status")
        status = sumo.search_job_status(search_job)
        LOGGER.info(status)

    LOGGER.info(status['state'])

    if status['state'] == 'DONE GATHERING RESULTS':
        record_count = status['recordCount']
        count = 0
        while count < record_count:
            LOGGER.info("Get records %d of %d, limit=%d", count, record_count, limit)
            response = sumo.search_job_records(search_job, limit=limit, offset = count)
            LOGGER.info("Got records %d of %d", count, record_count)

            recs = response['records']
            # extract the result maps to put them in the list of records
            for rec in recs:
                records.append({**rec['map'], **custom_columns})

            if len(recs) > 0:
                count = count + len(recs)
            else:
                break # make sure we exit if nothing comes back

    return records
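# A minimal invocation sketch for get_sumologic_records() above. The config keys
# match what the function reads; the credentials, root URL, query, and time range
# values below are placeholders:
config = {
    'sumologic_access_id': '<accessId>',
    'sumologic_access_key': '<accessKey>',
    'sumologic_root_url': 'https://api.sumologic.com/api',
}
records = get_sumologic_records(config, '_sourceCategory=prod/web',
                                '2021-01-01T00:00:00', '2021-01-02T00:00:00',
                                'UTC', limit=1000)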
# Sets an attribute across all collectors and sources in a given account.
#
# python bulk-set.py <accessId/email> <accessKey/password> <attribute> <value>

import pprint
import sys
import time

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
delay = .25
time.sleep(delay)
attr, val = args[3], args[4]
cs = sumo.collectors()
time.sleep(delay)
f = [{
    u'regexp': u'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.(\\d{1,3})',
    u'mask': u'255',
    u'filterType': u'Mask',
    u'name': u'last octet mask'
}]

for c in cs:
    if 'category' not in c or 'bwe' not in c['category'] and 'bwm' not in c[
            'category']:
        print('collector: ' + c['name'])
        ss = sumo.sources(c['id'])
        time.sleep(delay)
        for s in ss:
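            # [The snippet is cut off here. A hedged reconstruction of the loop body,
            #  modeled on the mv-cat.py example later in this collection: fetch each
            #  source with its etag, set <attribute> to <value>, push the update back,
            #  and throttle between calls. This is a sketch, not the original code.]
            sv, etag = sumo.source(c['id'], s['id'])
            sv['source'][attr] = val
            print(sumo.update_source(c['id'], sv, etag).text)
            time.sleep(delay)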
Example #15
# cat query.sumoql | python search-job.py <accessId> <accessKey> \
# https://api.us2.sumologic.com/api/v1/ 1408643380441 1408649380441 PST false

import json
import sys
import time
import logging

logging.basicConfig(level=logging.DEBUG)

from sumologic import SumoLogic

LIMIT = 42

args = sys.argv
sumo = SumoLogic(args[1], args[2], args[3])
fromTime = args[4]
toTime = args[5]
timeZone = args[6]
byReceiptTime = args[7]

delay = 5
q = ' '.join(sys.stdin.readlines())
sj = sumo.search_job(q, fromTime, toTime, timeZone, byReceiptTime)

status = sumo.search_job_status(sj)
while status['state'] != 'DONE GATHERING RESULTS':
    if status['state'] == 'CANCELLED':
        break
    time.sleep(delay)
    status = sumo.search_job_status(sj)
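# [The snippet stops after the polling loop. A hedged sketch of the usual next step,
#  mirroring the limit/offset paging used by the other search-job examples in this
#  collection; LIMIT is the constant defined above.]
if status['state'] == 'DONE GATHERING RESULTS':
    r = sumo.search_job_messages(sj, limit=LIMIT)
    for msg in r['messages']:
        print(json.dumps(msg['map']))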
# Remove collectors where field contains some specified string
#
# python rm-collectors.py <accessId> <accessKey> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
    if field in c and string in c[field]:
        cv, _ = sumo.collector(c['id'])
        print(sumo.delete_collector(cv).text)
# Deletes all sources (not collectors) in a given category.
#
# python rm-src-by-cat.py <accessId> <accessKey> <category>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
cat = args[3]
cs = sumo.collectors()

for c in cs:
    ss = sumo.sources(c['id'])
    for s in ss:
        if s['category'] == cat:
            sv, _ = sumo.source(c['id'], s['id'])
            print(sumo.delete_source(c['id'], sv).text)
Example #18
# Renames a category across all collectors and sources in a given account.
#
# python mv-cat.py <accessId/email> <accessKey/password> <fromName> <toName>
#
# TODO update query category constraints
# TODO regex

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
fromCat, toCat = args[3], args[4]
cs = sumo.collectors()

for c in cs:
    if 'category' in c and c['category'] == fromCat:
        cv, etag = sumo.collector(c['id'])
        cv['collector']['category'] = toCat
        print(sumo.update_collector(cv, etag).text)
    ss = sumo.sources(c['id'])
    for s in ss:
        if s['category'] == fromCat:
            sv, etag = sumo.source(c['id'], s['id'])
            sv['source']['category'] = toCat
            print(sumo.update_source(c['id'], sv, etag).text)
Example #19
            traceback.print_exc()
        logger.exception("error generating sumo query " + str(file) + "----" +
                         str(e))
        with open(
                os.path.join(
                    args.outdir,
                    "sigma-" + file_basename + '-error-generation.txt'),
                "w") as f:
            # f.write(json.dumps(r, indent=4, sort_keys=True) + " ERROR: %s\n\nQUERY: %s" % (e, sumo_query))
            f.write(" ERROR for file: %s\nException:\n %s" % (file, e))
        continue

    try:
        # Run query
        # https://github.com/SumoLogic/sumologic-python-sdk/blob/3ad8033deb028ac45ac4099f11c04785fa426f51/scripts/search-job.py
        sumo = SumoLogic(args.accessid, args.accesskey, args.endpoint)
        toTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
        fromTime = datetime.datetime.strptime(
            toTime, "%Y-%m-%dT%H:%M:%S") - datetime.timedelta(hours=24)
        fromTime = fromTime.strftime("%Y-%m-%dT%H:%M:%S")
        timeZone = 'UTC'
        byReceiptTime = True

        sj = sumo.search_job(sumo_query, fromTime, toTime, timeZone,
                             byReceiptTime)

        status = sumo.search_job_status(sj)
        while status['state'] != 'DONE GATHERING RESULTS':
            if status['state'] == 'CANCELLED':
                break
            time.sleep(delay)
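        # [Truncated here. A hedged sketch of what typically follows this polling
        #  pattern elsewhere in this collection: check the message count and write
        #  the results out. The output filename and limit are illustrative, and the
        #  json/os imports are assumed to exist in the surrounding script.]
        status = sumo.search_job_status(sj)
        if status['messageCount'] > 0:
            messages = sumo.search_job_messages(sj, limit=100)
            out_path = os.path.join(args.outdir,
                                    "sigma-" + file_basename + '-results.json')
            with open(out_path, "w") as f:
                f.write(json.dumps(messages, indent=4, sort_keys=True))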
Example #20
# Queries for new hosts and builds a monitor for each
#
# python mk-monitors-by-host.py <accessId> <accessKey> <monitor.json>
#
# where <monitor.json> describes the monitor
#
# 1/ Run query for new hosts
# 2/ Find appropriate dashboard
# 3/ Modify monitor.json query constraint
# 4/ PUT monitor.json

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
oldWindow = args[3]
newWindow = args[4]

ds = sumo.dashboards()
# Get collectors where field contains some specified string
#
# python get-collectors.py <accessId> <accessKey> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
	if field in c and string in c[field]:
		print(sumo.sources(c['id']))
Example #23
def putCollectors():

    data = request.json
    sumo = SumoLogic(data["apiid"], data["apikey"])
    response = {"errors": [], "success": []}

    # Go through each collector in the collector_map:
    for collectorid in data["collector_map"].keys():

        # Go through each source for a collector listed in the collector map
        for sourceid in data["collector_map"][collectorid]:

                # Find the source that matches the name (they're by name, for UI)
            for sourcename in data["source_names"].keys():

                # Do we skip this source altogether? (Over)Complicated by transient nature of 'selected'
                if not "selected" in data["source_names"][sourcename] or (
                    "selected" in data["source_names"][sourcename] and not data["source_names"][sourcename]["selected"]
                ):

                    # If there's a match, send the source to Sumo for update
                    if sourceid in data["source_names"][sourcename]["memberids"]:

                        # Are we just here to delete?
                        if (
                            "delete" in data["source_names"][sourcename]
                            and data["source_names"][sourcename]["delete"] == True
                        ):
                            print("- Deleting collector %s's source %s named %s." % (
                                collectorid,
                                str(sourceid),
                                sourcename,
                            ))
                            result = sumo.delete_source(collectorid, {"source": {"id": sourceid}})
                            print("- Delete Source: %s" % result.status_code)
                        else:
                            # We'll be mutating this, so keep the original re-usable
                            sourcepayload = deepcopy(data["source_names"][sourcename]["flattened"])

                            # Blacklists must be a list of path expressions, or missing:
                            if "blacklist" in sourcepayload and not isinstance(sourcepayload["blacklist"], list):
                                blklst = []
                                [
                                    blklst.append(blacklist.strip())
                                    for blacklist in sourcepayload["blacklist"].split(",")
                                ]
                                sourcepayload["blacklist"] = blklst

                            # Remove keys marked to be ignored
                            for ignorekey in data["source_names"][sourcename]["ignore"]:
                                if ignorekey in sourcepayload:
                                    del sourcepayload[ignorekey]

                            # The ID is deliberately absent from the flattened data, add
                            sourcepayload["id"] = sourceid

                            # Grrrrr:
                            # "All modifiable fields must be provided, and all immutable
                            # fields must match those existing in the system." --Sumo
                            sourcepayload["sourceType"] = data["all_sources"][str(sourceid)]["sourceType"]

                            # Convert boolean string to booleans
                            sourcepayload = fixBooleans(sourcepayload)

                            print("+ Updating Collector %s's source %s named %s" % (collectorid, sourceid, sourcename))

                            # You have to get the etag from a collector call
                            # TODO: refactor the initial fetch to include this somehow.
                            throwaway, etag = sumo.source(collectorid, sourceid)
                            result = sumo.update_source(collectorid, {"source": sourcepayload}, etag)
                            sleep(0.15)

                            print("+ Source Update: %s" % result.status_code)

                            # if str(result.status_code).startswith("2"):
                            #     response['success'].append(result)
                            # else:
                            #     response['errors'].append(result)

                        break
                else:
                    print(". Skipping source %s" % sourcename)

    # TODO: actually return useful information
    return jsonify(results=response)
#try:
#    import http.client as http_client
#except ImportError:
#    # Python 2
#    import httplib as http_client
#http_client.HTTPConnection.debuglevel = 1
#
## You must initialize logging, otherwise you'll not see debug output.
#logging.basicConfig()
#logging.getLogger().setLevel(logging.DEBUG)
#requests_log = logging.getLogger("requests.packages.urllib3")
#requests_log.setLevel(logging.DEBUG)
#requests_log.propagate = True

args = sys.argv
sumo = SumoLogic(args[1], args[2])
path, from_expr, to_expr = args[3], args[4], args[5]
cs = sumo.contents(path)['children']

for c in cs:
    if c['type'] == 'Search':
        print('before: ' + c['searchQuery'] + '\n')
        c['searchQuery'] = c['searchQuery'].replace(from_expr, to_expr, 1)
        print('after: ' + c['searchQuery'] + '\n')
    elif c['type'] == 'Dashboard':
        for col in c['columns']:
            for m in col['monitors']:
                print('before: ' + m['searchQuery'] + '\n')
                m['searchQuery'] = m['searchQuery'].replace(from_expr, to_expr, 1)
# Create source in the collector, where field contains some specified string
#
# python create-source.py <accessId/email> <accessKey/password> <field> <string> <source definition json>

import sys
import json

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
source = json.loads(args[5])
cs = sumo.collectors()

for c in cs:
    print(c)
    print(field, string)
    if field in c and string in c[field]:
        print(sumo.create_source(c['id'], source))
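# For reference, the <source definition json> argument above is passed straight to
# sumo.create_source(), which (as the other examples in this collection show) expects
# the definition wrapped in a "source" key. A hypothetical invocation, with
# placeholder field values that depend on the source type being created:
#
#   python create-source.py <accessId> <accessKey> category prod \
#       '{"source": {"name": "my-http-source", "sourceType": "HTTP",
#                    "category": "prod/web", "messagePerRequest": false}}'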
 def connect(self, params):
     access_id = params.get("access_id").get("secretKey")
     access_key = params.get("access_key").get("secretKey")
     self.client = SumoLogic(access_id, access_key)
class sumotoolbox(QtGui.QMainWindow, Ui_MainWindow):

    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        Ui_MainWindow.__init__(self)
        # detect if we are running in a pyinstaller bundle and set the base directory for file loads
        if getattr(sys,'frozen', False):
            self.basedir = sys._MEIPASS
        else:
            self.basedir = os.path.dirname(__file__)

        self.setupUi(self)
        self.initModels()   #load all the comboboxes and such with values
        self.loadcredentials()  #if a credential file exists populate the creds with values
        # init all of the dialogs we'll be using
        self.collectorcopyUI = uic.loadUi(qtCollectorCopyDialogUI)
        self.restoresourcesUI = uic.loadUi(qtRestoreSourcesDialogUI)
        self.deletesourceUI = uic.loadUi(qtDeleteSourcesDialogUI)
        # connect all of the UI button elements to their respective methods
        self.pushButtonUpdateListSource.clicked.connect(self.updatecollectorlistsource)
        self.pushButtonUpdateListDestination.clicked.connect(self.updatecollectorlistdestination)
        self.pushButtonCopyCollectorsToDest.clicked.connect(self.copysourcesfromsourcetodestinationdialog)
        self.pushButtonStartSearch.clicked.connect(self.runsearch)
        self.pushButtonBackupCollector.clicked.connect(self.backupcollector)
        self.pushButtonRestoreSources.clicked.connect(self.restoresources)
        self.pushButtonDeleteSources.clicked.connect(self.deletesource)

    def loadcredentials(self):

        #look to see if the credential file exists and load credentials if it does
        #fail if anything at all goes wrong
        if os.path.isfile(os.path.join(self.basedir,'data/credentials.json')):
            try:
                with open(os.path.join(self.basedir,'data/credentials.json'), 'r') as filepointer:
                    credentials = json.load(filepointer)
                self.SourceUserName.setText(credentials['source']['user'])
                self.SourcePassword.setText(credentials['source']['password'])
                self.DestinationUserName.setText(credentials['destination']['user'])
                self.DestinationPassword.setText(credentials['destination']['password'])
            except:
                pass

    def updatecollectorlistsource(self):
        self.listWidgetSourceCollectors.clear()     #clear the list first since it might already be populated
        sourceurl = self.loadedapiurls[str(self.comboBoxSourceRegion.currentText())] #get the selected API URL
        sourceusername = str(self.SourceUserName.text()) #get username
        sourcepassword = str(self.SourcePassword.text()) #get password
        self.sourcecollectordict = {} #init this so we can store a dict of collectors (easier to access than list)
        regexprog = re.compile(r'\S+') # make sure username and password have something in them
        if (re.match(regexprog,sourceusername) != None) and (re.match(regexprog,sourcepassword) != None):
            #access the API with provided credentials
            self.sumosource = SumoLogic(sourceusername, sourcepassword, endpoint=sourceurl)
            try:
                self.sourcecollectors = self.sumosource.collectors() #get list of collectors
                for collector in self.sourcecollectors:
                    self.sourcecollectordict[collector['name']]=collector['id'] #make a dict with just names and ids

                for collector in self.sourcecollectordict:
                    self.listWidgetSourceCollectors.addItem(collector) #populate the list widget in the GUI

                #set up a signal to update the source list if anything is changed
                self.listWidgetSourceCollectors.currentItemChanged.connect(self.updatesourcelistsource)
            except:
                self.errorbox('Incorrect Credentials.')
        else:
            self.errorbox('No user and/or password.')

    def updatecollectorlistdestination(self):
        self.listWidgetDestinationCollectors.clear() #clear the list first since it might already be populated
        destinationurl = self.loadedapiurls[str(self.comboBoxDestinationRegion.currentText())] #get the selected API URL
        destinationusername = str(self.DestinationUserName.text()) #get username
        destinationpassword = str(self.DestinationPassword.text()) #get password
        self.destinationcollectordict = {} #init this so we can store a dict of collectors (easier to access than list)
        regexprog = re.compile(r'\S+') # make sure username and password have something in them
        if (re.match(regexprog, destinationusername) is not None) and (re.match(regexprog,destinationpassword) is not None):
            #access the API with provided credentials
            self.sumodestination = SumoLogic(destinationusername, destinationpassword, endpoint=destinationurl)
            try:
                self.destinationcollectors = self.sumodestination.collectors() #get list of collectors
                for collector in self.destinationcollectors:
                    self.destinationcollectordict[collector['name']]=collector['id'] #make a dict with just names and ids

                for collector in self.destinationcollectordict:
                    self.listWidgetDestinationCollectors.addItem(collector) #populate the list widget in the GUI

                #set up a signal to update the source list if anything is changed
                self.listWidgetDestinationCollectors.currentItemChanged.connect(self.updatedestinationlistsource)
            except:
                self.errorbox('Incorrect Credentials.')
        else:
            self.errorbox('No user and/or password.')

    def updatesourcelistsource(self, currentcollector, prevcollector):

        self.listWidgetSourceSources.clear() #clear the list first since it might already be populated
        if currentcollector != None: #check to make sure that there is a collector selected
            self.sourcesourcesdict = {}
            # populate the list of sources
            self.sourcesources = self.sumosource.sources(self.sourcecollectordict[str(currentcollector.text())])
            for source in self.sourcesources:
                self.sourcesourcesdict[source['name']]='' #this is sloppy but I just want a dict of names
            for source in self.sourcesourcesdict:
                self.listWidgetSourceSources.addItem(source) #populate the display with sources

    def updatedestinationlistsource(self, currentcollector, prevcollector):

        self.listWidgetDestinationSources.clear() #clear the list first since it might already be populated
        if currentcollector != None: #check to make sure that there is a collector selected
            self.destinationsourcesdict = {}
            # populate the list of sources
            self.destinationsources = self.sumodestination.sources(self.destinationcollectordict[str(currentcollector.text())])
            for source in self.destinationsources:
                self.destinationsourcesdict[source['name']]='' #this is sloppy but I just want a dict of names
            for source in self.destinationsourcesdict:
                self.listWidgetDestinationSources.addItem(source) #populate the display with sources

    def copysourcesfromsourcetodestinationdialog(self):

        sourcecollector = self.listWidgetSourceCollectors.selectedItems() #get the selected source collector
        if len (sourcecollector) == 1: #make sure there is a collector selected, otherwise bail
            sourcecollector = sourcecollector[0].text() #qstring to string conversion
            destinationcollector = self.listWidgetDestinationCollectors.selectedItems() #get the selected dest collector
            if len(destinationcollector) == 1: #make sure there is a collector selected, otherwise bail

                destinationcollectorqstring = destinationcollector[0]
                destinationcollector = str(destinationcollector[0].text()) #qstring to string conversion
                sourcesources = self.listWidgetSourceSources.selectedItems() #get the selected sources
                if len(sourcesources) > 0: #make sure at least one source is selected
                    sourcelist = []
                    for source in sourcesources: #iterate through source names to build a warning message
                        sourcelist.append(source.text())
                    message = "You are about to copy the following sources from collector \"" + sourcecollector + "\" to \"" + destinationcollector + "\". Is this correct? \n\n"
                    for source in sourcelist:
                        message = message + source + "\n"
                    self.collectorcopyUI.labelCollectorCopy.setText(message) #set the warning message in the copy dialog
                    self.collectorcopyUI.dateTimeEdit.setMaximumDate(QtCore.QDate.currentDate()) #make sure user doesn't pick time in future
                    self.collectorcopyUI.dateTimeEdit.setDate(QtCore.QDate.currentDate()) #set date to today
                    result = self.collectorcopyUI.exec_() #bring up the copy dialog
                    #process collection time override inputs
                    overridecollectiondate = self.collectorcopyUI.checkBoxOverrideCollectionStartTime.isChecked()
                    overridedate = self.collectorcopyUI.dateTimeEdit.dateTime()
                    #convert the selected override date to epoch milliseconds
                    overridedatemillis = int(overridedate.toMSecsSinceEpoch())
                    if result:  #If they clicked "OK" rather than cancel
                        for source in sourcelist: #iterate through the selected sources and copy them
                            for sumosource in self.sourcesources:
                                #
                                if sumosource['name'] == source:
                                    if 'id' in sumosource: #the API creates an ID so this must be deleted before sending
                                        del sumosource['id']
                                    if 'alive' in sumosource:
                                        del sumosource['alive'] #the API sets this itself so this must be deleted before sending
                                    if overridecollectiondate:
                                        sumosource['cutoffTimestamp'] = overridedatemillis
                                    template = {}
                                    template['source'] = sumosource #the API expects a dict with a key called 'source'
                                    notduplicate = True
                                    for sumodest in self.destinationsources:
                                        if sumodest['name'] == source: #make sure the source doesn't already exist in the destination
                                            notduplicate = False
                                    if notduplicate: #finally lets copy this thing
                                        self.sumodestination.create_source(self.destinationcollectordict[destinationcollector], template)
                                    else:
                                        self.errorbox(source + ' already exists, skipping.')
                        #call the update method for the dest sources since they have changed after the copy
                        self.updatedestinationlistsource(destinationcollectorqstring, destinationcollectorqstring)


                else:
                    self.errorbox('No Sources Selected.')
            else:
                self.errorbox('No Destination Collector Selected.')
        else:
            self.errorbox('No Source Collector Selected.')

    def backupcollector(self):
        sourcecollector = self.listWidgetSourceCollectors.selectedItems() #get which sources have been selected
        if len (sourcecollector) == 1: #make sure something was selected
            if self.sourcesources: #make sure there's something to write to the file
                sourcecollector = str(sourcecollector[0].text()) + r'.json'
                savefile = str(QtGui.QFileDialog.getSaveFileName(self, 'Save As...', sourcecollector))
                if savefile:
                    with open(savefile, 'w') as filepointer:
                        json.dump(self.sourcesources, filepointer)
                    self.infobox('Wrote file ' + savefile)
            else:
                self.errorbox('No sources to backup.')

        else:
            self.errorbox('No Source Collector Selected.')

    def restoresources(self):
        destinationcollector = self.listWidgetDestinationCollectors.selectedItems()
        if len(destinationcollector) == 1:
            destinationcollectorqstring = destinationcollector[0]
            destinationcollector = str(destinationcollector[0].text())
            restorefile = str(QtGui.QFileDialog.getOpenFileName(self, 'Open Backup..','',selectedFilter='*.json'))
            sources = None
            try:
                with open(restorefile) as data_file:
                    sources = json.load(data_file)
            except:
                self.errorbox('Failed to load JSON file.')
            if sources:
                self.restoresourcesUI.dateTimeEdit.setMaximumDate(QtCore.QDate.currentDate())
                self.restoresourcesUI.dateTimeEdit.setDate(QtCore.QDate.currentDate())
                self.restoresourcesUI.listWidgetRestoreSources.clear()
                sourcedict = {}
                for source in sources:
                    sourcedict[source['name']]=''
                for source in sourcedict:
                    self.restoresourcesUI.listWidgetRestoreSources.addItem(source)
                result = self.restoresourcesUI.exec_()
                overridecollectiondate = self.restoresourcesUI.checkBoxOverrideCollectionStartTime.isChecked()
                overridedate = self.restoresourcesUI.dateTimeEdit.dateTime()
                overridedatemillis = int(overridedate.toMSecsSinceEpoch())
                if result:
                    selectedsources = self.restoresourcesUI.listWidgetRestoreSources.selectedItems()
                    if len(selectedsources) > 0:
                        for selectedsource in selectedsources:
                            for sumosource in sources:
                                    if sumosource['name'] == str(selectedsource.text()):
                                        if 'id' in sumosource:
                                            del sumosource['id']
                                        if 'alive' in sumosource:
                                            del sumosource['alive']
                                        if overridecollectiondate:
                                            sumosource['cutoffTimestamp'] = overridedatemillis
                                        template = {}
                                        template['source'] = sumosource
                                        notduplicate = True
                                        for sumodest in self.destinationsources:
                                            if sumodest['name'] == sumosource['name']: #compare against the source being restored
                                                notduplicate = False
                                        if notduplicate:
                                            self.sumodestination.create_source(self.destinationcollectordict[destinationcollector], template)
                                        else:
                                            self.errorbox(sumosource['name'] + ' already exists, skipping.')
                            self.updatedestinationlistsource(destinationcollectorqstring, destinationcollectorqstring)
                    else:
                        self.errorbox('No sources selected for import.')

        else:
            self.errorbox('No Destination Collector Selected.')

    def deletesource(self):
        sourcetodelete = self.listWidgetDestinationSources.selectedItems()
        if len(sourcetodelete) > 1:
            self.errorbox('Too many sources selected. There can be only one!')
        if len(sourcetodelete) == 1:
            message = "You are about to delete the following source:\n\n" + str(sourcetodelete[0].text()) + '\n\nIf you are sure type "DELETE" in the box below.'
            self.deletesourceUI.labelDeleteSources.setText(message)
            result = self.deletesourceUI.exec_()
            if result:
                if str(self.deletesourceUI.lineEditVerifyDelete.text()) == "DELETE":
                    destinationcollector = self.listWidgetDestinationCollectors.selectedItems()
                    destinationcollectorqstring = destinationcollector[0]
                    destinationcollector = str(destinationcollector[0].text())
                    destinationcollectorid = self.destinationcollectordict[destinationcollector]
                    for destinationsource in self.destinationsources:
                        if destinationsource['name'] == str(sourcetodelete[0].text()):
                            self.sumodestination.delete_source_by_id(destinationcollectorid, destinationsource['id'])
                            self.updatedestinationlistsource(destinationcollectorqstring, destinationcollectorqstring)
                else:
                    self.errorbox('You failed to type "DELETE". Crisis averted!')
        else:
            self.errorbox('No source selected.')

    def runsearch(self):

        self.tableWidgetSearchResults.clear()
        selectedtimezone = str(self.comboBoxTimeZone.currentText())
        starttime = str(self.dateTimeEditSearchStartTime.dateTime().toString(QtCore.Qt.ISODate))
        endtime = str(self.dateTimeEditSearchEndTime.dateTime().toString(QtCore.Qt.ISODate))
        sourceurl = self.loadedapiurls[str(self.comboBoxSourceRegion.currentText())]
        sourceusername = str(self.SourceUserName.text())
        sourcepassword = str(self.SourcePassword.text())
        searchstring = str(self.plainTextEditSearch.toPlainText())
        regexprog = re.compile(r'\S+')
        jobsubmitted = False
        savetofile = self.checkBoxSaveSearch.isChecked()
        converttimefromepoch = self.checkBoxConvertTimeFromEpoch.isChecked()
        self.jobmessages = []
        self.jobrecords = []

        if (re.match(regexprog,sourceusername) != None) and (re.match(regexprog,sourcepassword) != None):
            self.sumosource = SumoLogic(sourceusername, sourcepassword, endpoint=sourceurl)

            if (re.match(regexprog, searchstring)) != None:
                try:
                    searchjob = self.sumosource.search_job(searchstring, starttime, endtime, selectedtimezone)
                    jobsubmitted = True
                except:
                    self.errorbox('Incorrect Credentials.')
                if jobsubmitted:
                    self.labelSearchResultCount.setText('0')
                    jobstatus = self.sumosource.search_job_status(searchjob)
                    nummessages = jobstatus['messageCount']
                    numrecords = jobstatus['recordCount']
                    self.labelSearchResultCount.setText(str(nummessages))
                    while jobstatus['state'] == 'GATHERING RESULTS':
                        time.sleep(5)
                        jobstatus = self.sumosource.search_job_status(searchjob)
                        numrecords = jobstatus['recordCount']
                        nummessages = jobstatus['messageCount']
                        self.labelSearchResultCount.setText(str(nummessages))
                    if nummessages != 0:

                        #return messages
                        if self.buttonGroupOutputType.checkedId() == -2:
                            iterations = nummessages // 10000 + 1
                            for iteration in range (1,iterations + 1):
                                messages = self.sumosource.search_job_messages(searchjob,limit=10000,offset=((iteration-1)*10000))
                                for message in messages['messages']:
                                    self.jobmessages.append(message)
                            self.tableWidgetSearchResults.setRowCount(len(self.jobmessages))
                            self.tableWidgetSearchResults.setColumnCount(2)
                            self.tableWidgetSearchResults.setHorizontalHeaderLabels(['time','_raw'])
                            index = 0
                            for message in self.jobmessages:
                                if converttimefromepoch:
                                    timezone = pytz.timezone(selectedtimezone)
                                    converteddatetime = datetime.fromtimestamp(float(message['map']['_messagetime']) / 1000, timezone)
                                    timestring = str(converteddatetime.strftime('%Y-%m-%d %H:%M:%S'))
                                    message['map']['_messagetime'] = timestring
                                self.tableWidgetSearchResults.setItem(index,0,QtGui.QTableWidgetItem(message['map']['_messagetime']))
                                self.tableWidgetSearchResults.setItem(index,1,QtGui.QTableWidgetItem(message['map']['_raw']))
                                index += 1
                            self.tableWidgetSearchResults.resizeRowsToContents()
                            self.tableWidgetSearchResults.resizeColumnsToContents()
                            if savetofile:
                                filename = QtGui.QFileDialog.getSaveFileName(self, 'Save CSV', '', selectedFilter='*.csv')
                                if filename:
                                    with open(filename,'wb') as csvfile:
                                        messagecsv = csv.DictWriter(csvfile,self.jobmessages[0]['map'].keys())
                                        messagecsv.writeheader()
                                        for entry in self.jobmessages:
                                            messagecsv.writerow(entry['map'])
                        #return records
                        if self.buttonGroupOutputType.checkedId() == -3:
                            iterations = numrecords // 10000 + 1
                            for iteration in range (1,iterations + 1):
                                records = self.sumosource.search_job_records(searchjob,limit=10000,offset=((iteration-1)*10000))
                                for record in records['records']:
                                    self.jobrecords.append(record)
                            self.tableWidgetSearchResults.setRowCount(len(self.jobrecords))
                            numfields = len(records['fields'])
                            self.tableWidgetSearchResults.setColumnCount(numfields)
                            fieldnames = []
                            for field in records['fields']:
                                fieldnames.append(field['name'])
                            self.tableWidgetSearchResults.setHorizontalHeaderLabels(fieldnames)
                            index = 0
                            for record in self.jobrecords:
                                columnnum = 0
                                for fieldname in fieldnames:
                                    if converttimefromepoch and (fieldname == '_timeslice'):
                                        timezone = pytz.timezone(selectedtimezone)
                                        converteddatetime = datetime.fromtimestamp(float(record['map'][fieldname]) / 1000, timezone)
                                        timestring = str(converteddatetime.strftime('%Y-%m-%d %H:%M:%S'))
                                        record['map']['_timeslice'] = timestring
                                    self.tableWidgetSearchResults.setItem(index, columnnum, QtGui.QTableWidgetItem(record['map'][fieldname]))
                                    columnnum += 1
                                index += 1
                            self.tableWidgetSearchResults.resizeRowsToContents()
                            self.tableWidgetSearchResults.resizeColumnsToContents()
                            if savetofile:
                                filename = QtGui.QFileDialog.getSaveFileName(self, 'Save CSV', '', selectedFilter='*.csv')
                                if filename:
                                    with open(filename,'wb') as csvfile:
                                        recordcsv = csv.DictWriter(csvfile,self.jobrecords[0]['map'].keys())
                                        recordcsv.writeheader()
                                        for entry in self.jobrecords:
                                            recordcsv.writerow(entry['map'])
                    else:
                        self.errorbox('Search did not return any messages.')
            else:
                self.errorbox('Please enter a search.')
        else:
            self.errorbox('No user and/or password.')

    def errorbox(self, message):
        msgBox = QtGui.QMessageBox()
        msgBox.setWindowTitle('Error')
        msgBox.setText(message)
        msgBox.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.RejectRole)
        ret = msgBox.exec_()

    def infobox(self, message):
        msgBox = QtGui.QMessageBox()
        msgBox.setWindowTitle('Info')
        msgBox.setText(message)
        msgBox.addButton(QtGui.QPushButton('OK'), QtGui.QMessageBox.RejectRole)
        ret = msgBox.exec_()

    def initModels(self):
        # Load API Endpoint List from file and create model for the comboboxes
        with open(os.path.join(self.basedir,'data/apiurls.json'), 'r') as infile:
            self.loadedapiurls=json.load(infile)

        self.apiurlsmodel = QtGui.QStandardItemModel()
        for key in self.loadedapiurls:
            text_item = QtGui.QStandardItem(key)
            self.apiurlsmodel.appendRow(text_item)

        self.comboBoxSourceRegion.setModel(self.apiurlsmodel)
        self.comboBoxDestinationRegion.setModel(self.apiurlsmodel)

        #Load Timezones and create model for timezone combobox

        self.timezonemodel = QtGui.QStandardItemModel()
        for zone in pytz.common_timezones:
            text_item = QtGui.QStandardItem(zone)
            self.timezonemodel.appendRow(text_item)

        self.comboBoxTimeZone.setModel(self.timezonemodel)

        # set search start and endtimes to now-ish
        self.dateTimeEditSearchStartTime.setDateTime(QtCore.QDateTime.currentDateTime().addSecs(-900))
        self.dateTimeEditSearchEndTime.setDateTime(QtCore.QDateTime.currentDateTime())

        # set timezone combobox to local timezone
        localtimezone = str(get_localzone())
        index = self.comboBoxTimeZone.findText(localtimezone, QtCore.Qt.MatchFixedString)
        if index >= 0:
            self.comboBoxTimeZone.setCurrentIndex(index)
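
# A minimal, self-contained sketch (not part of the original application) of the
# pagination pattern used in the search method above: search_job_messages()
# returns at most `limit` messages per call, so results are pulled in
# 10,000-message pages by advancing the offset. Credentials, endpoint, and the
# completed search job are assumed to exist already.
def fetch_all_messages(sumo, searchjob, messagecount, pagesize=10000):
    allmessages = []
    for offset in range(0, messagecount, pagesize):
        page = sumo.search_job_messages(searchjob, limit=pagesize, offset=offset)
        allmessages.extend(page['messages'])
    return allmessages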
Exemple #29
0
# Create source in the collector, where field contains some specified string
#
# python create-source.py <accessId/email> <accessKey/password> <field> <string> <source definition json>

import sys
import json

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
source = json.loads(args[5])
cs = sumo.collectors()

for c in cs:
    print(c)
    print(field, string)
    if field in c and string in c[field]:
        print(sumo.create_source(c['id'], source))
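
# Hedged sketch (not part of the original script): one way to build the
# <source definition json> argument, which is passed straight to create_source().
# The field names inside "source" are illustrative assumptions about the
# collector management API, not taken from this repository.
import json

example_source = {"source": {"name": "extra-http-source", "sourceType": "HTTP"}}
print(json.dumps(example_source))  # pass this string as the fifth argument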
# Get collectors where field contains some specified string
#
# python get-collectors.py <accessId/email> <accessKey/password> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
    if field in c and string in c[field]:
        print(c)
Exemple #32
0
# Get all dashboard data
#
# python get-dashboard-data.py <accessId> <accessKey> <endpoint>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2], args[3])
ds = sumo.dashboards(True)
print(ds)

#for d in ds:
#	print d
#try:
#    import http.client as http_client
#except ImportError:
#    # Python 2
#    import httplib as http_client
#http_client.HTTPConnection.debuglevel = 1
#
## You must initialize logging, otherwise you'll not see debug output.
#logging.basicConfig()
#logging.getLogger().setLevel(logging.DEBUG)
#requests_log = logging.getLogger("requests.packages.urllib3")
#requests_log.setLevel(logging.DEBUG)
#requests_log.propagate = True

args = sys.argv
sumo = SumoLogic(args[1], args[2])
path, from_expr, to_expr = args[3], args[4], args[5]
cs = sumo.contents(path)['children']

for c in cs:
	if c['type'] == 'Search':
		print('before: ' + c['searchQuery'] + '\n')
		c['searchQuery'] = c['searchQuery'].replace(from_expr, to_expr, 1)
		print('after: ' + c['searchQuery'] + '\n')
	elif c['type'] == 'Dashboard':
		for col in c['columns']:
			for m in col['monitors']:
				print('before: ' + m['searchQuery'] + '\n')
				m['searchQuery'] = m['searchQuery'].replace(from_expr, to_expr, 1)
				print('after: ' + m['searchQuery'] + '\n')
print(sumo.create_contents(path.strip('/').split('/')[0], cs))
# Remove collectors where field contains some specified string
#
# python rm-collectors.py <accessId> <accessKey> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
    if field in c and string in c[field]:
        cv, _ = sumo.collector(c['id'])
        print(sumo.delete_collector(cv).text)
# python usage-report.py <accessId/email> <accessKey/password> <orgId> <fromTime> <toTime> <timezone> <timeslice> <email>
#
# TODO per-source
# TODO log hook
# TODO delete jobs?

from email.mime.text import MIMEText
import json
from smtplib import SMTP
import sys

from sumologic import SumoLogic

args = sys.argv

sumo = SumoLogic(args[1], args[2], "https://long-api.sumologic.net/api/v1")
orgId = args[3]
fromTime = args[4]
toTime = args[5]
timezone = args[6]
timeslice = args[7]
fromEmail = '*****@*****.**'
toEmail = args[8]

lookup = "lookup/collector_name"

q = r"""
_sourceCategory=config "Collector by name and ID" !GURR "%s"
| parse "[logger=*]" as logger
| where logger = "scala.config.LoggingVisitor"
| parse "Collector by name and ID, id: '*', decimal: '*', name: '*', organization ID: '*', decimal: '*', organization name: '*', organization type: '*'"
# Sets an attribute across all collectors and sources in a given account.
#
# python bulk-set.py <accessId> <accessKey> <attribute> <value>

import pprint
import sys
import time

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
delay = .25
time.sleep(delay)
attr, val = args[3], args[4]
cs = sumo.collectors()
time.sleep(delay)
f = [{u'regexp': u'\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.(\\d{1,3})', u'mask': u'255', u'filterType': u'Mask', u'name': u'last octet mask'}]

for c in cs:
    if 'category' not in c or 'bwe' not in c['category'] and 'bwm' not in c['category']:
        print('collector: ' + c['name'])
        ss = sumo.sources(c['id'])
        time.sleep(delay)
        for s in ss:
            sv, etag = sumo.source(c['id'], s['id'])
            time.sleep(delay)
            svi = sv['source']
            if 'category' not in svi or 'bwe' not in svi['category'] and 'bwm' not in svi['category']:
                print('source: ' + svi['name'])
                svi['filters'] = f
 def __init__(self, props, *args, **kwargs):
     access_id, access_key, deployment = props["SumoAccessID"], props[
         "SumoAccessKey"], props["SumoDeployment"]
     self.deployment = deployment
     self.sumologic_cli = SumoLogic(access_id, access_key,
                                    self.api_endpoint)
 def connect(self, params):
     access_id = params.get('access_id').get('secretKey')
     access_key = params.get('access_key').get('secretKey')
     self.client = SumoLogic(access_id, access_key)
#
# Example:
#
# cat query.sumoql | python search-job.py <accessId/email> <accessKey/password> \
# 1408643380441 1408649380441 PST

import json
import sys
import time

from sumologic import SumoLogic

LIMIT = 42

args = sys.argv
sumo = SumoLogic(args[1], args[2])
fromTime = args[3]
toTime = args[4]
timeZone = args[5]

delay = 2
q = ' '.join(sys.stdin.readlines())
sj = sumo.search_job(q, fromTime, toTime, timeZone)

status = sumo.search_job_status(sj)
while status['state'] != 'DONE GATHERING RESULTS':
	if status['state'] == 'CANCELLED':
		break
	time.sleep(delay)
	delay *= 2
	status = sumo.search_job_status(sj)
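
# Hedged continuation sketch (the original example stops above and LIMIT is
# otherwise unused): once the job reports DONE GATHERING RESULTS, LIMIT would
# typically page through the messages via search_job_messages(), mirroring the
# pagination used elsewhere in these examples. This driver is an assumption,
# not part of the original script.
if status['state'] == 'DONE GATHERING RESULTS':
	count = status['messageCount']
	offset = 0
	while offset < count:
		page = sumo.search_job_messages(sj, limit=LIMIT, offset=offset)
		for message in page['messages']:
			print(json.dumps(message['map']))
		offset += LIMIT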
Exemple #40
0
def getSources():

    """
    We're going to build a big ol' data structure that, in the end,
    should look something like this:

    sourceinfo = {
        'all_sources': {
            8583428: [{COMPLETE SOURCE OBJECT}],
            8583111: [{COMPLETE SOURCE OBJECT}],
            8583846: [{COMPLETE SOURCE OBJECT}],
            8583298: [{COMPLETE SOURCE OBJECT}]
        },
        'source_names': {
            'Friendly Source Name #1': {
                'memberids': [8583428, 8583111],
                'flattened' : {FLATTENED OBJECT VALUES},
                'ignore': {},
                'collectors': ['prodweb01', 'stagedb02']
            },
            'Friendly Source Name #2': {
                'memberids': [8583846, 8583298],
                'flattened': {
                    'something like this': "",
                    'category': ['System', 'System2'],
                    'encoding': ['UTF-8', 'ISO-8859-1']
                },
            }
        },
        'collector_map': {
            8583422: [8583428, 8583111],
            8583401: [8583846, 8583298],
        }
    }
    """

    sourceinfo = {"all_sources": {}, "source_names": {}, "collector_map": {}}

    data = request.json
    sumo = SumoLogic(data["apiid"], data["apikey"])

    for collector in data["collectors"]:
        # Get each collector's source from Sumo & store locally:

        sources = sumo.sources(collector["id"])
        sleep(0.15)

        # Add a key of the collector's id to the map. This map maps
        # sources to collectors so we don't have to keep asking Sumo.
        if collector["id"] not in sourceinfo["collector_map"]:
            sourceinfo["collector_map"][collector["id"]] = []

        for source in sources:

            # Add to the main sourceinfo list of all sources
            if source["id"] not in sourceinfo["all_sources"]:
                sourceinfo["all_sources"][source["id"]] = source

            # We need to group the sources by their name, but keep their
            # data in the original format, so we'll make a dictionary of
            # names and a list of source ids that have them:
            if source["name"] not in sourceinfo["source_names"]:
                sourceinfo["source_names"][source["name"]] = {
                    "memberids": [],
                    "flattened": {},
                    "ignore": {},
                    "collectors": [],
                }

            sourceinfo["source_names"][source["name"]]["memberids"].append(source["id"])
            sourceinfo["source_names"][source["name"]]["collectors"].append(collector["name"])

            # Update the collector_map with this source's key
            sourceinfo["collector_map"][collector["id"]].append(source["id"])

    # Finally, we're grouping the sources together by name for bulk editing
    # so we'll check to see if there are any sources that differ from
    # each other using sets.  The best way to do this is to go through the
    # sources that are listed with the same source_names:

    for sourcename in sourceinfo["source_names"]:

        # Each unique source name has members. Those member's
        # values will be flattened into the flattener!
        flattener = defaultdict(set)
        for memberid in sourceinfo["source_names"][sourcename]["memberids"]:
            for srckey, srcval in sourceinfo["all_sources"][memberid].items():
                # Sets don't like lists. Unpacking accomplishes almost the same goal
                if isinstance(srcval, list):
                    for item in srcval:
                        flattener[srckey].add(item)
                else:
                    flattener[srckey].add(srcval)

        # What got flattened?
        # Since we have to tie the Angular data model to a flattened version (sigh)
        # we build this flattened dict instead of using the original sources. It
        # makes things hard in the long run, but it kinda has to be that way:
        for k, v in flattener.items():
            if k not in uneditableSourceFields:
                if len(v) == 1:
                    sourceinfo["source_names"][sourcename]["flattened"][k] = list(v)[0]
                elif len(v) > 1:
                    sourceinfo["source_names"][sourcename]["flattened"][k] = list(v)
                elif len(v) == 0:
                    sourceinfo["source_names"][sourcename]["flattened"][k] = []

        # Not all values are returned with every API call.
        # Blacklist is one of them, but it needs to be here
        # in order to be rendered in the template.  This
        # should be handled better eventually. TODO
        if "blacklist" not in flattener:
            sourceinfo["source_names"][sourcename]["flattened"]["blacklist"] = []

    # That's it.  Just return that to Angular
    return jsonify(results=sourceinfo)
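
# Stand-alone sketch (not from the original app) of the flattening step above:
# values from sources that share a name are collected into sets, so fields that
# agree collapse to a single value and fields that differ surface as a list of
# alternatives.
from collections import defaultdict

def flatten_sources(sources):
    flattener = defaultdict(set)
    for src in sources:
        for key, val in src.items():
            if isinstance(val, list):
                flattener[key].update(val)
            else:
                flattener[key].add(val)
    return {k: (list(v)[0] if len(v) == 1 else sorted(v)) for k, v in flattener.items()}

# Example: category differs across the two sources, encoding agrees.
print(flatten_sources([
    {"name": "web", "category": "System", "encoding": "UTF-8"},
    {"name": "web", "category": "System2", "encoding": "UTF-8"},
]))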
Exemple #41
0
 def __init__(self, access_id, access_key, deployment):
     self.deployment = deployment
     self.sumologic_cli = SumoLogic(access_id, access_key,
                                    self.api_endpoint)
# cat query.sumoql | python search-job.py <accessId> <accessKey> \
# https://api.us2.sumologic.com/api/v1 1408643380441 1408649380441 PST false

import json
import sys
import time
import logging

logging.basicConfig(level=logging.DEBUG)

from sumologic import SumoLogic

LIMIT = 42

args = sys.argv
sumo = SumoLogic(args[1], args[2], args[3])
fromTime = args[4]
toTime = args[5]
timeZone = args[6]
byReceiptTime = args[7]

delay = 5
q = ' '.join(sys.stdin.readlines())
sj = sumo.search_job(q, fromTime, toTime, timeZone, byReceiptTime)

status = sumo.search_job_status(sj)
while status['state'] != 'DONE GATHERING RESULTS':
    if status['state'] == 'CANCELLED':
        break
    time.sleep(delay)
    status = sumo.search_job_status(sj)
class SumoLogic_AbuseIPDB_Aggregator():
    def __init__(self):
        """
        Checks for command line arguments
        Input (None):
            sys.argv[]                       : System command line arguments
        Return (None):
            query (String)                   : The SumoLogic query
            defaultQuery (Bool)              : Indicates if default query was executed
            fromTime (String)                : Date for query to start search
            toTime (String)                  : Date for query to end search
            timeZone (String)                : Timezone so query knows what timezone to align its search times to
            current_date (String)            : Today's date
            uniqueHTTPCollectorCode (String) : The code for the collector to post to
            sumo_access_id (String)          : Needed to access SumoLogic through Sumo Logic Python SDK
            sumo_access_key (String)         : Needed to access SumoLogic through Sumo Logic Python SDK
            abuse_apiV2_key (String)         : Needed to access AbuseIPDB api
            abuseIPDB_days (String)          : Number of days AbuseIPDB api will use when looking up ip
        """
        try:
            sys.argv[1]  #query assignment
        except:
            sys.exit("Status Code 1\n1 parameter is missing (query).")
        else:
            if sys.argv[1] == "default-all":
                self.defaultQuery = True
                from default_query import query
                if not query:
                    sys.exit(
                        "Status Code 1\n1 parameter is missing in defaults (query)."
                    )
                self.query = query
                from default_query import current_date, fromTime, toTime, timeZone
                if not fromTime or not toTime or not timeZone or not current_date:
                    sys.exit(
                        "Status Code 1\nAt least 1 time parameter is missing in defaults (fromTime, toTime, timeZone, or current_date)."
                    )
                self.fromTime = fromTime
                self.toTime = toTime
                self.timeZone = timeZone
                self.current_date = current_date
                from sumo_collector_code import uniqueHTTPCollectorCode
                if not uniqueHTTPCollectorCode:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameter is missing in defaults (uniqueHTTPCollectorCode)."
                    )
                self.uniqueHTTPCollectorCode = uniqueHTTPCollectorCode
                self.postAuthorization = True
                from api_keys import sumo_access_id, sumo_access_key
                if not sumo_access_id or not sumo_access_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 sumo access parameter is missing in defaults (sumo_access_id or sumo_access_key)."
                    )
                self.sumo_access_id = sumo_access_id
                self.sumo_access_key = sumo_access_key
                self.sumo_api = SumoLogic(sumo_access_id, sumo_access_key)
                from api_keys import abuse_apiV2_key
                if not abuse_apiV2_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameter is missing in defaults (abuse_apiV2_key)."
                    )
                self.abuse_apiV2_key = abuse_apiV2_key
                from abuseipdb_parameters import abuseIPDB_days
                if not abuseIPDB_days:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameter is missing in defaults (abuseIPDB_days)."
                    )
                self.abuseIPDB_days = abuseIPDB_days
                self.log_directory = "./logs"
                return
            elif sys.argv[1] == "default":
                self.defaultQuery = True
                from default_query import query
                if not query:
                    sys.exit(
                        "Status Code 1\n1 parameter is missing in defaults (query)."
                    )
                self.query = query
            else:
                self.defaultQuery = False
                with open(str(sys.argv[1])) as file:
                    self.query = str(file.read())

        try:
            sys.argv[2]  #fromTime assignment
            sys.argv[3]  #toTime assignment
            sys.argv[4]  #timeZone assignment
        except:
            sys.exit(
                "Status Code 1\nAt least 1 time parameter is missing (fromTime, toTime, or timeZone)."
            )
        else:
            if sys.argv[2] == sys.argv[3] == sys.argv[4] == "default":
                from default_query import current_date, fromTime, toTime, timeZone
                if not fromTime or not toTime or not timeZone or not current_date:
                    sys.exit(
                        "Status Code 1\nAt least 1 time parameter is missing in defaults (fromTime, toTime, timeZone, or current_date)."
                    )
                self.fromTime = fromTime
                self.toTime = toTime
                self.timeZone = timeZone
                self.current_date = current_date
            elif "default" in (sys.argv[2], sys.argv[3], sys.argv[4]):
                # Mixing "default" with explicit values is not allowed here;
                # the all-default case was already handled above.
                sys.exit(
                    "Status Code 1\nAll three parameters must be default or explicitly provided (fromTime, toTime, or timeZone)."
                )
            else:
                from default_query import current_date
                if not current_date:
                    sys.exit(
                        "Status Code 1\nAt least 1 time parameter is missing in defaults (current_date)."
                    )
                self.fromTime = sys.argv[2]
                self.toTime = sys.argv[3]
                self.timeZone = sys.argv[4]
                self.current_date = current_date

        try:
            sys.argv[5]  #uniqueHTTPCollectorCode assignment
        except:
            sys.exit(
                "Status Code 1\nAt least 1 parameter is missing (uniqueHTTPCollectorCode)."
            )
        else:
            if sys.argv[5] == "default":
                from sumo_collector_code import uniqueHTTPCollectorCode
                if not uniqueHTTPCollectorCode:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameter is missing in defaults (uniqueHTTPCollectorCode)."
                    )
                self.uniqueHTTPCollectorCode = uniqueHTTPCollectorCode
                self.postAuthorization = True
            elif sys.argv[5] == "False":
                self.uniqueHTTPCollectorCode = ""
                self.postAuthorization = False
            else:
                self.uniqueHTTPCollectorCode = sys.argv[5]
                self.postAuthorization = True

        try:
            sys.argv[6]  #sumo_access_id assignment
            sys.argv[7]  #sumo_access_key assignment
        except:
            sys.exit(
                "Status Code 1\nAt least 1 sumo access parameter is missing (sumo_access_id or sumo_access_key)."
            )
        else:
            if sys.argv[6] == sys.argv[7] == "default":
                from api_keys import sumo_access_id, sumo_access_key
                if not sumo_access_id or not sumo_access_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 sumo access parameter is missing in defaults (sumo_access_id or sumo_access_key)."
                    )
                self.sumo_access_id = sumo_access_id
                self.sumo_access_key = sumo_access_key
                self.sumo_api = SumoLogic(sumo_access_id, sumo_access_key)
            elif sys.argv[6] == "default" and sys.argv[
                    7] != "default" or sys.argv[6] != "default" and sys.argv[
                        7] == "default":
                sys.exit(
                    "Status Code 1\nBoth parameters must be default or explicitly provided (sumo_access_id or sumo_access_key)."
                )
            else:
                self.sumo_access_id = sys.argv[6]
                self.sumo_access_key = sys.argv[7]
                self.sumo_api = SumoLogic(self.sumo_access_id,
                                          self.sumo_access_key)

        try:
            sys.argv[8]  #abuse_apiV2_key assignment
        except:
            sys.exit(
                "Status Code 1\nAt least 1 parameter is missing (abuse_apiV2_key)."
            )
        else:
            if sys.argv[8] == "default":
                from api_keys import abuse_apiV2_key
                if not abuse_apiV2_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameter is missing in defaults (abuse_apiV2_key)."
                    )
                self.abuse_apiV2_key = abuse_apiV2_key
            else:
                self.abuse_apiV2_key = sys.argv[8]

        try:
            sys.argv[9]  #abuseIPDB_days assignment
        except:
            sys.exit(
                "Status Code 1\nAt least 1 parameter is missing (abuseIPDB_days)."
            )
        else:
            if sys.argv[9] == "default":
                from abuseipdb_parameters import abuseIPDB_days
                if not abuseIPDB_days:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameter is missing in defaults (abuseIPDB_days)."
                    )
                self.abuseIPDB_days = abuseIPDB_days
            else:
                self.abuseIPDB_days = sys.argv[9]

        try:
            sys.argv[10]  #log_directory assignment
        except:
            sys.exit(
                "Status Code 1\nAt least 1 parameter is missing (log_directory)."
            )
        else:
            if sys.argv[10] == "default":
                self.log_directory = "./logs"
            else:
                self.log_directory = sys.argv[10]

    def abuseIPDB_lookup(self, ipAddresses):
        abuseipdb_api = AbuseIPDB()
        ip_records = []

        for ip in ipAddresses:
            json_load = abuseipdb_api.check_ip(ip, self.abuseIPDB_days,
                                               self.abuse_apiV2_key)
            ip_records.append(abuseipdb_api.record_ip_data(json_load))

        return ip_records

    def record_search(self,
                      query_results,
                      joined_results,
                      console_show_join=False,
                      return_fileName=False):
        """
        Populates file with log info and the parameters passed in
        Input:
            query_results (List)     : List of dictionaries containing the results of the SumoLogic query
            joined_results (List)    : query_results with abuseIPDB results joined into each dictionary
            console_show_join (Bool) : Indicates whether joined_results will be printed in console
            return_fileName (Bool)   : Indicates whether file name of log is returned
        Return:
            file_name (String)       : The file name of the log that was just recorded
        """
        try:
            if not os.path.exists(self.log_directory):
                os.makedirs(self.log_directory)
        except OSError:
            print('Error: Creating directory. ' + self.log_directory)

        file_name = self.log_directory + "/output_" + self.current_date.strftime(
            "D%Y-%m-%d_") + self.current_date.strftime("T%H-%M-%S") + "_.txt"

        if console_show_join is True:
            print("Joined Results: " + '\n' + json.dumps(joined_results))

        file = open(file_name, "w")
        file.write("defaultQuery = " + str(self.defaultQuery) + '\n\n')

        if self.defaultQuery is False:
            file.write("Accessing Sumo Search Query in: " + str(sys.argv[1]) +
                       '\n\n')
        else:
            file.write("Accessing Sumo Search Query in: default_query.py" +
                       '\n\n')

        file.write("Query:\n" + self.query + '\n\n')
        file.write("From " + self.fromTime + " To " + self.toTime + '\n\n')
        file.write("Query Results:\n")
        file.write(json.dumps(query_results) + '\n\n')
        file.write("Joined Results:\n")
        file.write(json.dumps(joined_results))
        file.close()

        if return_fileName is True:
            return file_name

    def failed_search(self, exception, query_results, console_show_join=False):
        """
        Populates file with log info and the parameters passed in upon failed search and kills execution
        Input:
            exception (String)       : The Exception raised
            console_show_join (Bool) : Indicates whether joined_results will be printed in console
        Return (None):
            Kills execution after logging to file
        """
        try:
            if not os.path.exists(self.log_directory):
                os.makedirs(self.log_directory)
        except OSError:
            print('Error: Creating directory. ' + self.log_directory)

        file_name = self.log_directory + "/output_" + self.current_date.strftime(
            "D%Y-%m-%d_") + self.current_date.strftime("T%H-%M-%S") + "_.txt"
        file = open(file_name, "w")
        file.write("defaultQuery = " + str(self.defaultQuery) + '\n\n')

        if self.defaultQuery is False:
            file.write("Accessing Sumo Search Query in: " + str(sys.argv[1]) +
                       '\n\n')
        else:
            file.write("Accessing Sumo Search Query in: default_query.py" +
                       '\n\n')

        file.write("Query:\n" + self.query + '\n\n')
        file.write("From " + self.fromTime + " To " + self.toTime + '\n\n')
        file.write("Query Results:\n")
        file.write(json.dumps(query_results) + '\n\n')
        file.write("FAILED SEARCH" + '\n\n')
        file.write("EXCEPTION:\n" + str(exception) + '\n\n')

        file.close()

        sys.exit("Status Code 1\nFailed Search")

    def get_ip_list(self, query_results):
        """
        Parses through query_results for dictionaries and then scans for "src_ip" keys appending to a list
        Input:
            query_results (List) : List of dictionaries containing the results of the SumoLogic query
        Return:
            ip_list (List)       : List of ip addresses to send to abuseIPDB for lookup
        """
        ip_list = []
        for dictionary in query_results:
            for key, value in dictionary.items():
                if key == "src_ip":
                    ip_list.append(value)

        try:
            ip_list[0]
        except IndexError as e:
            self.failed_search(e, query_results, console_show_join=True)

        return ip_list

    @staticmethod
    def join(ip_records, query_results):
        """
        Takes list of SumoLogic dictionaries and joins them with the list of dictionaries from abuseIPDB's
        results based on ip lookup
        Input:
            ip_record (List)      : List of dictionaries containing abuseIPDB's ip lookups
            query_results (List)  : List of dictionaries containing the results of the SumoLogic query
        Return:
            joined_results (List) : List of dictionaries, the join between ip_record and query_results
        """
        joined_results = []
        for record, dictionary in zip(ip_records, query_results):
            record = record.getAttributes(includeIP=False)
            temp = {}
            for key, value in record.items():
                temp[key + " (abuseIPDB)"] = value
            join = {**dictionary, **temp}
            joined_results.append(join)
        return joined_results

    def get_query_results(self):
        """
        Runs query through SumoLogic to get results of query
        Input:
            sumo_api (Class)         : 'sumologic.sumologic.SumoLogic', Sumo Logic Python SDK, https://github.com/SumoLogic/sumologic-python-sdk
        Return:
            query_results (List)     : List of dictionaries containing the results of the SumoLogic query
        """
        if self.defaultQuery is False:
            print("Accessing Sumo Search Query in: " + str(sys.argv[1]) + '\n')
            print("Query:\n" + self.query)
            print("From " + self.fromTime + " To " + self.toTime + '\n')
            query_results = self.sumo_api.search(self.query,
                                                 fromTime=self.fromTime,
                                                 toTime=self.toTime,
                                                 timeZone=self.timeZone)
        else:
            print("Accessing Default Query in default_query.py\n")
            print("Query:\n" + self.query + '\n')
            print("From " + self.fromTime + " To " + self.toTime + '\n')
            try:
                query_results = self.sumo_api.search(self.query,
                                                     fromTime=self.fromTime,
                                                     toTime=self.toTime,
                                                     timeZone=self.timeZone)
            except Exception as e:
                # query_results is not yet defined if the search call itself failed
                self.failed_search(e, [], console_show_join=True)
        print("Query Results: \n" + str(query_results) + '\n')
        return query_results

    def post_to_http_source(self, joined_results):
        """
        Posts joined_results to an HTTP source collector in Sumo Logic
        Input:
            joined_results (List)            : query_results with abuseIPDB results joined into each dictionary
        Return (None):
            Posts joined_results to the specified collector
        """
        if self.postAuthorization is True:
            sumo_api_post = SumoLogic(
                self.sumo_access_id,
                self.sumo_access_key,
                endpoint=
                "https://endpoint1.collection.us2.sumologic.com/receiver/v1/http/"
            )
            post_object = sumo_api_post.post(self.uniqueHTTPCollectorCode,
                                             joined_results)
            print('\n')
            print(post_object)
        else:
            print("\nPost authorization disabled.\n")
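
# Hedged usage sketch (not present in the original source): how the class above
# might be driven end to end. The method order follows the class definition;
# the entry point itself is an assumption.
if __name__ == "__main__":
    aggregator = SumoLogic_AbuseIPDB_Aggregator()
    query_results = aggregator.get_query_results()
    ip_list = aggregator.get_ip_list(query_results)
    ip_records = aggregator.abuseIPDB_lookup(ip_list)
    joined_results = SumoLogic_AbuseIPDB_Aggregator.join(ip_records, query_results)
    aggregator.record_search(query_results, joined_results, console_show_join=True)
    aggregator.post_to_http_source(joined_results)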
# Deletes all sources (not collectors) in a given category.
#
# python rm-src-by-cat.py <accessId> <accessKey> <category>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
cat = args[3]
cs = sumo.collectors()

for c in cs:
    ss = sumo.sources(c['id'])
    for s in ss:
        if s['category'] == cat:
            sv, _ = sumo.source(c['id'], s['id'])
            print(sumo.delete_source(c['id'], sv).text)
Exemple #45
0
import sys
from os import path

from sumologic import SumoLogic

if path.isfile("access.key"):
    cf = open("access.key", "r")
    creds = cf.readlines()
else:
    sys.exit(
        "access.key file missing. Place your accessId and accessKey on separate lines in this file."
    )

args = sys.argv
if len(args) < 3:
    sys.exit("usage: " + args[0] + " fromDate toDate [outFile]")

sumo = SumoLogic(creds[0].strip(), creds[1].strip())

fromTime = args[1]
toTime = args[2]
timeZone = 'UTC'
byReceiptTime = False
r_fields = ['_messagetime', 'msg']  # names of fields to include in output
try:
    outfile = args[3]
except IndexError:
    outfile = "sumo_results_" + fromTime + "_to_" + toTime + ".txt"

delay = 5
q = ' '.join(sys.stdin.readlines())
sj = sumo.search_job(q, fromTime, toTime, timeZone, byReceiptTime)
# Search-replaces monitor time windows.
#
# python mv-monitor-times.py <accessId> <accessKey> <oldWindow> <newWindow>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
oldWindow = args[3]
newWindow = args[4]

ds = sumo.dashboards()
#
# Example:
#
# cat query.sumoql | python search-job.py <accessId> <accessKey> \
# 1408643380441 1408649380441 PST

import json
import sys
import time

from sumologic import SumoLogic

LIMIT = 42

args = sys.argv
sumo = SumoLogic(args[1], args[2])
fromTime = args[3]
toTime = args[4]
timeZone = args[5]

delay = 2
q = ' '.join(sys.stdin.readlines())
sj = sumo.search_job(q, fromTime, toTime, timeZone)

status = sumo.search_job_status(sj)
while status['state'] != 'DONE GATHERING RESULTS':
    if status['state'] == 'CANCELLED':
        break
    time.sleep(delay)
    delay *= 2
    status = sumo.search_job_status(sj)
# Get collectors where field contains some specified string
#
# python get-collectors.py <accessId/email> <accessKey/password> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
	if field in c and string in c[field]:
		print(c)
Exemple #49
0
# Get collectors where field contains some specified string
#
# python get-collectors.py <accessId> <accessKey> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
    if field in c and string in c[field]:
        print(sumo.sources(c['id']))
# Sets an attribute across all collectors and sources in a given account.
#
# python bulk-set.py <accessId/email> <accessKey/password> <attribute> <value>

import pprint
import sys
import time

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
delay = 0.25
time.sleep(delay)
attr, val = args[3], args[4]
cs = sumo.collectors()
time.sleep(delay)
f = [
    {
        u"regexp": u"\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.(\\d{1,3})",
        u"mask": u"255",
        u"filterType": u"Mask",
        u"name": u"last octet mask",
    }
]

for c in cs:
    if "category" not in c or "bwe" not in c["category"] and "bwm" not in c["category"]:
        print("collector: " + c["name"])
        ss = sumo.sources(c["id"])
        time.sleep(delay)