Example #1
def get_sumologic_fields(config, q, from_time, to_time, time_zone):
    fields = []

    sumologic_access_id = config['sumologic_access_id']
    sumologic_access_key = config['sumologic_access_key']
    sumologic_root_url = config['sumologic_root_url']

    LOGGER.info("Run query in sumologic")
    sumo = SumoLogic(sumologic_access_id, sumologic_access_key, sumologic_root_url)

    delay = 5
    search_job = sumo.search_job(q, from_time, to_time, time_zone)

    status = sumo.search_job_status(search_job)
    while status['state'] != 'DONE GATHERING RESULTS':
        if status['state'] == 'CANCELLED':
            break
        time.sleep(delay)
        status = sumo.search_job_status(search_job)

    LOGGER.info(status['state'])

    if status['state'] == 'DONE GATHERING RESULTS':
        response = sumo.search_job_records(search_job, limit=1)

        fields = response['fields']

    return fields
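
A hypothetical call, for context: the config keys mirror the ones read inside the function, and the root URL shown here is only a placeholder for whichever Sumo Logic API deployment applies.

config = {
    'sumologic_access_id': '<access id>',
    'sumologic_access_key': '<access key>',
    'sumologic_root_url': 'https://api.sumologic.com/api/v1',  # placeholder deployment URL
}
fields = get_sumologic_fields(config, '_sourceCategory=prod | count by _sourcehost',
                              '2021-01-01T00:00:00', '2021-01-02T00:00:00', 'UTC')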
Example #2
 def __init__(self, connection, configuration):
     self.endpoint = (PROTOCOL + URL +
                      ENDPOINT) % (connection.get("region"))
     self.auth = configuration.get('auth')
     self.client = SumoLogic(self.auth.get("access_id"),
                             self.auth.get("access_key"),
                             endpoint=self.endpoint)
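
PROTOCOL, URL, and ENDPOINT are defined elsewhere in the source module; a hypothetical set of values consistent with the single %-placeholder used above (illustrative only, not the actual constants):

# Hypothetical module-level constants (illustrative only).
PROTOCOL = "https://"
URL = "api.%s.sumologic.com"
ENDPOINT = "/api/v1"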
Example #3
def pingCollectors():
    config = configparser.RawConfigParser()
    config.read('config.properties')

    # SumoLogic Access ID and Access Key
    accessID = config.get("Config", "accessID")
    accessKey = config.get("Config", "accessKey")

    sumo = SumoLogic(accessID, accessKey)
    theBlob = sumo.collectors()
    outputMsg = "The following collectors are offline:\n"
    empty = True
    for stuff in theBlob:
        collectorName = ''
        collectorAlive = False
        for attribute, value in stuff.items():
            if attribute == 'name':
                collectorName = value
            if attribute == 'alive':
                collectorAlive = value
        if not collectorAlive:
            empty = False
            outputMsg += collectorName + "\n"
    if empty:
        return ''
    else:
        return outputMsg
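
A minimal driver sketch, assuming the function is used as a standalone check (the original script may instead mail or post the report):

# Hypothetical entry point: print the offline-collector report if there is one.
if __name__ == '__main__':
    offline_report = pingCollectors()
    if offline_report:
        print(offline_report)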
Example #4
 def __init__(self, config):
     super(BaseAction, self).__init__(config)
     access_id = self.config.get('sumologic_access_id', None)
     access_key = self.config.get('sumologic_access_key', None)
     sumologic_collectors_limit = self.config.get(
         'sumologic_collectors_limit', 1000)
     self._sumologic_access_key = access_key or None
     self._sumologic_access_id = access_id or None
     self._sumologic_collectors_limit = sumologic_collectors_limit or 1000
     if self._sumologic_access_key is None or self._sumologic_access_id is None:
         self._client = None
     else:
         self._client = SumoLogic(self._sumologic_access_id,
                                  self._sumologic_access_key)
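
Since the client is left as None when either credential is missing, concrete actions built on this base class presumably guard on it before calling the API; a hypothetical action method illustrating the pattern (the method name and the limit keyword are assumptions):

 def run(self):
     # Hypothetical action body; real actions in the source pack may differ.
     if self._client is None:
         raise ValueError('sumologic_access_id and sumologic_access_key must be configured')
     # Assumption: the SDK's collectors() accepts a limit keyword.
     return self._client.collectors(limit=self._sumologic_collectors_limit)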
Example #5
def get_sumologic_records(config, q, from_time, to_time, time_zone, limit):
    records = []

    sumologic_access_id = config['sumologic_access_id']
    sumologic_access_key = config['sumologic_access_key']
    sumologic_root_url = config['sumologic_root_url']

    LOGGER.info("Run query in sumologic")
    sumo = SumoLogic(sumologic_access_id, sumologic_access_key, sumologic_root_url)

    now_datetime = datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')
    custom_columns = {
        '_SDC_EXTRACTED_AT': now_datetime,
        '_SDC_BATCHED_AT': now_datetime,
        '_SDC_DELETED_AT': None
    }
    delay = 5
    search_job = sumo.search_job(q, from_time, to_time, time_zone)
    LOGGER.info(search_job)

    status = sumo.search_job_status(search_job)
    while status['state'] != 'DONE GATHERING RESULTS':
        if status['state'] == 'CANCELLED':
            break
        time.sleep(delay)
        LOGGER.info(":check query status")
        status = sumo.search_job_status(search_job)
        LOGGER.info(status)

    LOGGER.info(status['state'])

    if status['state'] == 'DONE GATHERING RESULTS':
        record_count = status['recordCount']
        count = 0
        while count < record_count:
            LOGGER.info("Get records %d of %d, limit=%d", count, record_count, limit)
            response = sumo.search_job_records(search_job, limit=limit, offset=count)
            LOGGER.info("Got records %d of %d", count, record_count)

            recs = response['records']
            # extract the result maps to put them in the list of records
            for rec in recs:
                records.append({**rec['map'], **custom_columns})

            if len(recs) > 0:
                count = count + len(recs)
            else:
                break # make sure we exit if nothing comes back

    return records
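
For reference, search_job_records responses wrap each record's field values under a 'map' key, which is why the loop above merges rec['map'] with the custom columns; a trimmed illustration of the shape (field names are hypothetical):

# Illustrative response shape only; field names are made up.
response = {
    'fields': [{'name': '_sourcehost', 'fieldType': 'string'}],
    'records': [
        {'map': {'_sourcehost': 'web-01', '_count': '42'}},
    ],
}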
Example #6
 def post_to_http_source(self, joined_results):
     """
     Posts joined_results to a collector in Sumo Logic
     Input:
         joined_results (List)            : query_results with abuseIPDB results joined into each dictionary
     Return (None):
         Posts joined_results to the specified collector
     """
     if self.postAuthorization is True:
         sumo_api_post = SumoLogic(
             self.sumo_access_id,
             self.sumo_access_key,
             endpoint=
             "https://endpoint1.collection.us2.sumologic.com/receiver/v1/http/"
         )
         post_object = sumo_api_post.post(self.uniqueHTTPCollectorCode,
                                          joined_results)
         print('\n')
         print(post_object)
     else:
         print("\nPost authorization disabled.\n")
Example #7
 def __init__(self, access_id, access_key, deployment):
     self.deployment = deployment
     self.sumologic_cli = SumoLogic(access_id, access_key,
                                    self.api_endpoint)
Example #8
 def __init__(self, props, *args, **kwargs):
     access_id, access_key, deployment = props["SumoAccessID"], props[
         "SumoAccessKey"], props["SumoDeployment"]
     self.deployment = deployment
     self.sumologic_cli = SumoLogic(access_id, access_key,
                                    self.api_endpoint)
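
Both constructors above rely on a self.api_endpoint attribute derived from the deployment; a hypothetical property showing how such a mapping is commonly written (the actual implementation in the source repo may differ):

 @property
 def api_endpoint(self):
     # Assumption: the usual Sumo Logic deployment-to-endpoint convention.
     if self.deployment in ("us1", "prod"):
         return "https://api.sumologic.com/api"
     return "https://api.%s.sumologic.com/api" % self.deployment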
Example #9
# python usage-report.py <accessId/email> <accessKey/password> <orgId> <fromTime> <toTime> <timezone> <timeslice> <email>
#
# TODO per-source
# TODO log hook
# TODO delete jobs?

from email.mime.text import MIMEText
import json
from smtplib import SMTP
import sys

from sumologic import SumoLogic

args = sys.argv

sumo = SumoLogic(args[1], args[2], "https://long-api.sumologic.net/api/v1")
orgId = args[3]
fromTime = args[4]
toTime = args[5]
timezone = args[6]
timeslice = args[7]
fromEmail = '*****@*****.**'
toEmail = args[8]

lookup = "lookup/collector_name"

q = r"""
_sourceCategory=config "Collector by name and ID" !GURR "%s"
| parse "[logger=*]" as logger
| where logger = "scala.config.LoggingVisitor"
| parse "Collector by name and ID, id: '*', decimal: '*', name: '*', organization ID: '*', decimal: '*', organization name: '*', organization type: '*'"
Example #10
            traceback.print_exc()
        logger.exception("error generating sumo query " + str(file) + "----" +
                         str(e))
        with open(
                os.path.join(
                    args.outdir,
                    "sigma-" + file_basename + '-error-generation.txt'),
                "w") as f:
            # f.write(json.dumps(r, indent=4, sort_keys=True) + " ERROR: %s\n\nQUERY: %s" % (e, sumo_query))
            f.write(" ERROR for file: %s\n\Exception:\n %s" % (file, e))
        continue

    try:
        # Run query
        # https://github.com/SumoLogic/sumologic-python-sdk/blob/3ad8033deb028ac45ac4099f11c04785fa426f51/scripts/search-job.py
        sumo = SumoLogic(args.accessid, args.accesskey, args.endpoint)
        toTime = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S")
        fromTime = datetime.datetime.strptime(
            toTime, "%Y-%m-%dT%H:%M:%S") - datetime.timedelta(hours=24)
        fromTime = fromTime.strftime("%Y-%m-%dT%H:%M:%S")
        timeZone = 'UTC'
        byReceiptTime = True

        sj = sumo.search_job(sumo_query, fromTime, toTime, timeZone,
                             byReceiptTime)

        status = sumo.search_job_status(sj)
        while status['state'] != 'DONE GATHERING RESULTS':
            if status['state'] == 'CANCELLED':
                break
            time.sleep(delay)
Example #11
 def connect(self, params):
     access_id = params.get("access_id").get("secretKey")
     access_key = params.get("access_key").get("secretKey")
     self.client = SumoLogic(access_id, access_key)
Example #12
#!/usr/bin/python

import os
import time
import datetime
from sumologic import SumoLogic

sumo = SumoLogic(os.environ["SUMO_ACCESS_ID"], os.environ["SUMO_ACCESS_KEY"], os.environ["SUMO_ENDPOINT"])

# --------------- INPUT 

with open("query.txt", "r") as query_file:
  query = query_file.read()

duration = 160 * 60 * 1000
startTimeStep = 30 * 60 * 1000

firstStartTime = 1574636400000
lastStartTime = 1575241200000

runCount = int((lastStartTime - firstStartTime) / startTimeStep)

# ---------------------

def startQuery(iteration):
  thisStartTime = firstStartTime + iteration * startTimeStep
  return sumo.search_job(query, fromTime = thisStartTime, toTime = thisStartTime + duration, timeZone = "CET", byReceiptTime = False)

def dateForIteration(iteration):
  return datetime.datetime.fromtimestamp((firstStartTime + iteration * startTimeStep) / 1000).strftime("%m/%d/%Y %I:%M %p")
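
The listing stops after the helper definitions; a minimal driver sketch, assuming the same polling pattern used in the other examples, that starts each backfill window in turn and reports its final state:

# Hypothetical driver loop (not part of the original listing).
for iteration in range(runCount):
  job = startQuery(iteration)
  status = sumo.search_job_status(job)
  while status["state"] not in ("DONE GATHERING RESULTS", "CANCELLED"):
    time.sleep(5)
    status = sumo.search_job_status(job)
  print("%s -> %s" % (dateForIteration(iteration), status["state"]))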
Example #13
import sys
import time
from os import path

from sumologic import SumoLogic

if path.isfile("access.key"):
    cf = open("access.key", "r")
    creds = cf.readlines()
else:
    sys.exit(
        "access.key file missing. Place your accessId and accessKey on separate lines in this file."
    )

args = sys.argv
if len(args) < 3:
    sys.exit("usage: " + args[0] + " fromDate toDate [outFile]")

sumo = SumoLogic(creds[0].strip(), creds[1].strip())

fromTime = args[1]
toTime = args[2]
timeZone = 'UTC'
byReceiptTime = False
r_fields = ['_messagetime', 'msg']  # names of fields to include in output
try:
    outfile = args[3]
except IndexError:
    outfile = "sumo_results_" + fromTime + "_to_" + toTime + ".txt"

delay = 5
q = ' '.join(sys.stdin.readlines())
sj = sumo.search_job(q, fromTime, toTime, timeZone, byReceiptTime)
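
The listing cuts off after the job is created; a hypothetical continuation, following the polling pattern from the other examples, that waits for completion and writes the selected fields to the output file (the 10000-message limit is an assumption, not a value from the source):

# Hypothetical continuation (not part of the original listing).
status = sumo.search_job_status(sj)
while status['state'] not in ('DONE GATHERING RESULTS', 'CANCELLED'):
    time.sleep(delay)
    status = sumo.search_job_status(sj)

if status['state'] == 'DONE GATHERING RESULTS':
    messages = sumo.search_job_messages(sj, limit=10000)['messages']
    with open(outfile, 'w') as out:
        for m in messages:
            out.write('\t'.join(str(m['map'].get(f, '')) for f in r_fields) + '\n')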
Example #14
def sumo_query(**kwargs):
    # create variables based on user input
    query_file = kwargs.pop("query", None)
    access_id = kwargs.pop("access_id", None)
    access_key = kwargs.pop("access_key", None)
    endpoint = kwargs.pop("endpoint", None)
    from_time = kwargs.pop("from_time", None)
    to_time = kwargs.pop("to_time", None)
    by_receipt_time = kwargs.pop("by_receipt_time", None)
    timezone = kwargs.pop("timezone", None)
    verbose = kwargs.pop("verbose", None)
    LIMIT = kwargs.pop("limit", None)

    delay = 5
    query = query_file.read().decode()

    # create connection instance
    sumo = SumoLogic(access_id, access_key, endpoint)

    # create search job
    search_job = sumo.search_job(query, from_time, to_time, timezone,
                                 by_receipt_time)

    # create search job status object and check state of the search job
    search_job_status = sumo.search_job_status(search_job)
    while search_job_status["state"] != "DONE GATHERING RESULTS":
        if search_job_status["state"] == "CANCELLED":
            break
        time.sleep(delay)
        search_job_status = sumo.search_job_status(search_job)

    if search_job_status["state"] == "DONE GATHERING RESULTS":
        count = search_job_status["recordCount"]
        limit = count if count < LIMIT and count != 0 else LIMIT
        result = sumo.search_job_messages(search_job, limit=limit)

    messages = result["messages"]

    # print result of the search job
    num = 0
    for msg in messages:
        # map_string = msg['map']['_raw'].replace("\\n","").replace("\\t","").replace("\\","")
        map_string = msg["map"]["_raw"]
        try:
            map_dict = eval(map_string)
        except NameError:
            map_dict = eval(
                map_string.replace("null",
                                   "None").replace("true", "True").replace(
                                       "false", "False"))
        # click.secho("//////// Docker info ////////", bg='white', fg='black')

        try:
            docker_dict = map_dict["message"]["docker"]
            click.secho("//////// Docker info ////////",
                        bg="white",
                        fg="black")
            pprint(docker_dict)

        except (KeyError, TypeError):
            click.secho("//////// Whole log message ////////",
                        bg="white",
                        fg="black")
            pprint(map_dict)
            click.secho(
                "\\\\\\\\\\\\\\\\ Whole log message \\\\\\\\\\\\\\\\",
                bg="white",
                fg="black",
            )
            print()
            print("########" * 10 + "   " + str(num) + "   " + "########" * 10)
            print()
            num += 1
            continue

        click.secho("\\\\\\\\\\\\\\\\ Docker info \\\\\\\\\\\\\\\\",
                    bg="white",
                    fg="black")

        click.secho("//////// Kubernetes info ////////",
                    bg="white",
                    fg="black")
        pprint(map_dict["message"]["kubernetes"])
        click.secho("\\\\\\\\\\\\\\\\ Kubernetes info \\\\\\\\\\\\\\\\",
                    bg="white",
                    fg="black")

        click.secho("//////// Log ////////", bg="white", fg="black")
        try:
            map_log_dict = eval(map_dict["message"]["log"])
        except SyntaxError:
            pprint(map_dict["message"]["log"], width=500)
            click.secho("\\\\\\\\\\\\\\\\ Log \\\\\\\\\\\\\\\\",
                        bg="white",
                        fg="black")
            print()
            print("########" * 10 + "   " + str(num) + "   " + "########" * 10)
            print()
            num += 1
            continue

        if not isinstance(map_log_dict, dict):
            pprint(map_log_dict, width=500)
        else:
            if not verbose:
                for key, value in map_log_dict.items():
                    if key == "stacktrace" or key == "errorVerbose":
                        continue
                    else:
                        pprint(f"{key} : {map_log_dict[key]}", width=500)
            else:
                pprint(map_log_dict, width=500)
        click.secho("\\\\\\\\\\\\\\\\ Log \\\\\\\\\\\\\\\\",
                    bg="white",
                    fg="black")

        print()
        print("########" * 10 + "   " + str(num) + "   " + "########" * 10)
        print()
        num += 1
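
A hypothetical direct call for context, bypassing whatever CLI wrapper the source repo provides; the keyword names match the kwargs popped at the top of the function, and query is passed as a binary file handle because the function calls .read().decode():

with open("query.txt", "rb") as qf:
    sumo_query(
        query=qf,
        access_id="<access id>",
        access_key="<access key>",
        endpoint="https://api.sumologic.com/api/v1",  # placeholder deployment URL
        from_time="2021-01-01T00:00:00",
        to_time="2021-01-01T01:00:00",
        by_receipt_time=False,
        timezone="UTC",
        verbose=False,
        limit=100,
    )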
Example #15
def main():
    timezone = 'America/New_York'

    # Flag to use the by receipt time (_receiptTime), instead of the by message time (_messageTime) [default: 'false']
    byReceiptTime = 'false'

    # Getting the current timestamp, in epoch format
    date_to = datetime.datetime.now().strftime('%s')

    # Getting the current timestamp, minus the delta hours, in epoch format
    date_from = datetime.datetime.now() - datetime.timedelta(hours=24)
    date_from = date_from.strftime('%s')

    # Defining new Sumo Logic search job object
    sumo = SumoLogic(cip_access_id, cip_access_key)

    # Creating the search job
    search = sumo.search_job(cip_query, date_from, date_to, timezone,
                             byReceiptTime)

    # Verifying the status of the search job
    status = sumo.search_job_status(search)

    while status['state'] != 'DONE GATHERING RESULTS':
        if status['state'] == 'CANCELLED':
            break
        time.sleep(delay)
        status = sumo.search_job_status(search)
    '''
    Section to handle messages
    '''
    if status['state'] == 'DONE GATHERING RESULTS':

        # get results of cat on lookup table
        count = status['messageCount']
        limit = count if count < 1000 and count != 0 else 1000  # compensate bad limit check
        results = sumo.search_job_messages(search, limit=limit)
        messages = results['messages']

        # set parameters for uploading to CSE
        batch_size = 25
        expiration = (
            datetime.datetime.now() +
            datetime.timedelta(days=30)).strftime('%Y-%m-%dT%H:%M:%SZ')

        # uploading to CSE in batches of 25
        for i in range(0, len(messages), batch_size):
            batch = messages[i:i + batch_size]
            items = {'items': []}
            for message in batch:
                record = message['map']
                items['items'].append({
                    'active': True,
                    'description': 'Automated update.',
                    'expiration': expiration,
                    'value': record[field_for_matchlist]
                })
            r = requests.post(url=cse_url, headers=cse_headers, json=items)
            if r.status_code > 201:
                error = r.text
                status_code = r.status_code
                logging.error(f'Status Code: {status_code}')
                logging.error(f'Error uploading items: {error}')
            else:
                logging.info(f'Submitted {len(batch)} match list values.')
Example #16
    def __init__(self):
        """
        Checks for command line arguments
        Input (None):
            sys.argv[]                       : System command line arguments
        Return (None):
            query (String)                   : The SumoLogic query
            defaultQuery (Bool)              : Indicates if default query was executed
            fromTime (String)                : Date for query to start search
            toTime (String)                  : Date for query to end search
            timeZone (String)                : Timezone so query knows what timezone to align its search times to
            current_date (String)            : Today's date
            uniqueHTTPCollectorCode (String) : The code for the collector to post to
            sumo_access_id (String)          : Needed to access SumoLogic through Sumo Logic Python SDK
            sumo_access_key (String)         : Needed to access SumoLogic through Sumo Logic Python SDK
            abuse_apiV2_key (String)         : Needed to access AbuseIPDB api
            abuseIPDB_days (String)          : Number of days AbuseIPDB api will use when looking up ip
        """
        try:
            sys.argv[1]  #query assignment
        except IndexError:
            sys.exit("Status Code 1\n1 parameters is missing (query).")
        else:
            if sys.argv[1] == "default-all":
                self.defaultQuery = True
                from default_query import query
                if not query:
                    sys.exit(
                        "Status Code 1\n1 parameters is missing in defaults (query)."
                    )
                self.query = query
                from default_query import current_date, fromTime, toTime, timeZone
                if not fromTime or not toTime or not timeZone or not current_date:
                    sys.exit(
                        "Status Code 1\nAt least 1 time parameters is missing in defaults (fromTime, toTime, timeZone, or current_date)."
                    )
                self.fromTime = fromTime
                self.toTime = toTime
                self.timeZone = timeZone
                self.current_date = current_date
                from sumo_collector_code import uniqueHTTPCollectorCode
                if not uniqueHTTPCollectorCode:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameters is missing in defaults (uniqueHTTPCollectorCode)."
                    )
                self.uniqueHTTPCollectorCode = uniqueHTTPCollectorCode
                self.postAuthorization = True
                from api_keys import sumo_access_id, sumo_access_key
                if not sumo_access_id or not sumo_access_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 sumo access parameters is missing  in defaults (sumo_access_id or sumo_access_key)."
                    )
                self.sumo_access_id = sumo_access_id
                self.sumo_access_key = sumo_access_key
                self.sumo_api = SumoLogic(sumo_access_id, sumo_access_key)
                from api_keys import abuse_apiV2_key
                if not abuse_apiV2_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameters is missing in defaults (abuse_apiV2_key)."
                    )
                self.abuse_apiV2_key = abuse_apiV2_key
                from abuseipdb_parameters import abuseIPDB_days
                if not abuseIPDB_days:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameters is missing in defaults (abuseIPDB_days)."
                    )
                self.abuseIPDB_days = abuseIPDB_days
                self.log_directory = "./logs"
                return
            elif sys.argv[1] == "default":
                self.defaultQuery = True
                from default_query import query
                if not query:
                    sys.exit(
                        "Status Code 1\n1 parameters is missing in defaults (query)."
                    )
                self.query = query
            else:
                self.defaultQuery = False
                with open(str(sys.argv[1])) as file:
                    self.query = str(file.read())

        try:
            sys.argv[2]  #fromTime assignment
            sys.argv[3]  #toTime assignment
            sys.argv[4]  #timeZone assignment
        except IndexError:
            sys.exit(
                "Status Code 1\nAt least 1 time parameters is missing (fromTime, toTime, or timeZone)."
            )
        else:
            if sys.argv[2] == sys.argv[3] == sys.argv[4] == "default":
                from default_query import current_date, fromTime, toTime, timeZone
                if not fromTime or not toTime or not timeZone or not current_date:
                    sys.exit(
                        "Status Code 1\nAt least 1 time parameters is missing in defaults (fromTime, toTime, timeZone, or current_date)."
                    )
                self.fromTime = fromTime
                self.toTime = toTime
                self.timeZone = timeZone
                self.current_date = current_date
            elif "default" in (sys.argv[2], sys.argv[3], sys.argv[4]):
                sys.exit(
                    "Status Code 1\nAll parameters must be default or inputted (fromTime, toTime, or timeZone)."
                )
            else:
                from default_query import current_date
                if not current_date:
                    sys.exit(
                        "Status Code 1\nAt least 1 time parameters is missing in defaults (current_date)."
                    )
                self.fromTime = sys.argv[2]
                self.toTime = sys.argv[3]
                self.timeZone = sys.argv[4]
                self.current_date = current_date

        try:
            sys.argv[5]  #uniqueHTTPCollectorCode assignment
        except IndexError:
            sys.exit(
                "Status Code 1\nAt least 1 parameters is missing (uniqueHTTPCollectorCode)."
            )
        else:
            if sys.argv[5] == "default":
                from sumo_collector_code import uniqueHTTPCollectorCode
                if not uniqueHTTPCollectorCode:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameters is missing in defaults (uniqueHTTPCollectorCode)."
                    )
                self.uniqueHTTPCollectorCode = uniqueHTTPCollectorCode
                self.postAuthorization = True
            elif sys.argv[5] == "False":
                self.uniqueHTTPCollectorCode = ""
                self.postAuthorization = False
            else:
                self.uniqueHTTPCollectorCode = sys.argv[5]
                self.postAuthorization = True

        try:
            sys.argv[6]  #sumo_access_id assignment
            sys.argv[7]  #sumo_access_key assignment
        except IndexError:
            sys.exit(
                "Status Code 1\nAt least 1 sumo access parameters is missing (sumo_access_id or sumo_access_key)."
            )
        else:
            if sys.argv[6] == sys.argv[7] == "default":
                from api_keys import sumo_access_id, sumo_access_key
                if not sumo_access_id or not sumo_access_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 sumo access parameters is missing  in defaults (sumo_access_id or sumo_access_key)."
                    )
                self.sumo_access_id = sumo_access_id
                self.sumo_access_key = sumo_access_key
                self.sumo_api = SumoLogic(sumo_access_id, sumo_access_key)
            elif sys.argv[6] == "default" and sys.argv[
                    7] != "default" or sys.argv[6] != "default" and sys.argv[
                        7] == "default":
                sys.exit(
                    "Status Code 1\nBoth parameters must be default or inputted (sumo_access_id or sumo_access_key)."
                )
            else:
                self.sumo_access_id = sys.argv[6]
                self.sumo_access_key = sys.argv[7]
                self.sumo_api = SumoLogic(self.sumo_access_id,
                                          self.sumo_access_key)

        try:
            sys.argv[8]  #abuse_apiV2_key assignment
        except IndexError:
            sys.exit(
                "Status Code 1\nAt least 1 parameters is missing (abuse_apiV2_key)."
            )
        else:
            if sys.argv[8] == "default":
                from api_keys import abuse_apiV2_key
                if not abuse_apiV2_key:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameters is missing in defaults (abuse_apiV2_key)."
                    )
                self.abuse_apiV2_key = abuse_apiV2_key
            else:
                self.abuse_apiV2_key = sys.argv[8]

        try:
            sys.argv[9]  #abuseIPDB_days assignment
        except IndexError:
            sys.exit(
                "Status Code 1\nAt least 1 parameters is missing (abuseIPDB_days)."
            )
        else:
            if sys.argv[9] == "default":
                from abuseipdb_parameters import abuseIPDB_days
                if not abuseIPDB_days:
                    sys.exit(
                        "Status Code 1\nAt least 1 parameters is missing in defaults (abuseIPDB_days)."
                    )
                self.abuseIPDB_days = abuseIPDB_days
            else:
                self.abuseIPDB_days = sys.argv[9]

        try:
            sys.argv[10]  #log_directory assignment
        except IndexError:
            sys.exit(
                "Status Code 1\nAt least 1 parameters is missing (log_directory)."
            )
        else:
            if sys.argv[10] == "default":
                self.log_directory = "./logs"
            else:
                self.log_directory = sys.argv[10]
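
Given the argument handling above, hypothetical invocations in the usage-comment style used elsewhere in this listing (the script name and every value are placeholders):

# python <script>.py default-all
# python <script>.py my_query.txt 2021-01-01T00:00:00 2021-01-02T00:00:00 UTC default default default default default default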
Example #17
 def connect(self, params):
     access_id = params.get('access_id').get('secretKey')
     access_key = params.get('access_key').get('secretKey')
     self.client = SumoLogic(access_id, access_key)
Example #18
# Get collectors where field contains some specified string
#
# python get-collectors.py <accessId> <accessKey> <field> <string>

import sys

from sumologic import SumoLogic

args = sys.argv
sumo = SumoLogic(args[1], args[2])
field, string = args[3], args[4]
cs = sumo.collectors()

for c in cs:
    if field in c and string in c[field]:
        print(sumo.sources(c['id']))
Example #19
from decouple import config
from vmware.vapi.vsphere.client import create_vsphere_client
from sumologic import SumoLogic
import requests
import sys

# Vsphere
vc_username = config('VC_USERNAME')
vc_userpass = config('VC_USERPASS')
vsphere1_host = config('VSPHERE1_HOST')
vsphere2_host = config('VSPHERE2_HOST')
# Sumo
sumo_access_id = config('SUMO_ACCESS_ID')
sumo_access_key = config('SUMO_ACCESS_KEY')

sumo = SumoLogic(sumo_access_id, sumo_access_key)


def delete_collector(sumo_collector):
    url = f"https://api.us2.sumologic.com/api/v1/collectors/{sumo_collector['id']}"
    r = requests.delete(url, auth=(sumo_access_id, sumo_access_key))
    print(
        f"I: Deleting {sumo_collector['id']} {sumo_collector['name']} rsp: {r}"
    )
    return r


def delete_powered_off_vm_sumo_collectors(sumo_collectors, vms):
    for sumo_collector in sumo_collectors:
        if not sumo_collector['alive']:
            match_ratio, match_vm = get_vm_name_in_sumo_collector_name_ratio(