Code Example #1
from cbapi.response import CbEnterpriseResponseAPI, Sensor


def main():
    try:
        with open('max_id.txt', 'r') as target:
            # The stored value is a string; convert it so it compares correctly
            # against the integer stat IDs below.
            id_max_old = int(target.readline().strip())
    except (IOError, ValueError):
        id_max_old = 0
    id_max_new = id_max_old
    c = CbEnterpriseResponseAPI(profile="default")
    sensor_list = c.select(Sensor)
    for sensor in sensor_list:
        if sensor.queued_stats:
            try:
                for i in sensor.queued_stats:
                    if i['id'] > id_max_old:
                        if i['id'] > id_max_new:
                            id_max_new = i['id']
                        dt = i['timestamp'][0:19]
                        sn = sensor.computer_name
                        event_bytes = int(i['num_eventlog_bytes'])
                        binary_bytes = int(i['num_storefile_bytes'])
                        print "%s,%s,%d,%d" % (dt, sn, event_bytes, binary_bytes)
            except AttributeError:
                pass
    
    with open('max_id.txt', 'w') as target:
        target.write(str(id_max_new))
Code Example #2
def main():
    parser = build_cli_parser("Process utility")
    args = parser.parse_args()

    # BEGIN Common
    if args.prefix:
        output_filename = '{0}-processes.csv'.format(args.prefix)
    else:
        output_filename = 'processes.csv'

    if args.append == True or args.queryfile is not None:
        file_mode = 'a'
    else:
        file_mode = 'w'

    if args.days:
        query_base = ' start:-{0}m'.format(args.days * 1440)
    elif args.minutes:
        query_base = ' start:-{0}m'.format(args.minutes)
    else:
        query_base = ''

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    queries = []
    if args.query:
        queries.append(args.query)
    elif args.queryfile:
        with open(args.queryfile, 'r') as f:
            for query in f:
                queries.append(query.strip())
    else:
        queries.append('')
    # END Common

    output_file = open(output_filename, file_mode)
    writer = csv.writer(output_file)
    writer.writerow([
        "proc_timestamp", "proc_hostname", "proc_username", "proc_path",
        "proc_cmdline", "proc_md5", "proc_child_count", "proc_filemod_count",
        "proc_modload_count", "proc_netconn_count", "proc_url"
    ])

    for query in queries:
        result_set = process_search(cb, query, query_base)

        for row in result_set:
            if _python3 == False:
                row = [
                    col.encode('utf8') if isinstance(col, unicode) else col
                    for col in row
                ]
            writer.writerow(row)

    output_file.close()
Code Example #3
def get_cbapi(splunk_service):
    if not splunk_service:
        return CbEnterpriseResponseAPI()
    else:
        cb_server, token = get_creds(splunk_service)
        return CbEnterpriseResponseAPI(token=token,
                                       url=cb_server,
                                       ssl_verify=False)
Code Example #4
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--profile",
                        type=str,
                        action="store",
                        help="The credentials.response profile to use.")
    parser.add_argument("--debug",
                        action="store_true",
                        help="Write additional logging info to stdout.")
    parser.add_argument("--max-threads",
                        type=int,
                        action="store",
                        default=5,
                        help="Maximum number of concurrent threads.")

    # Sensor query parameters
    s = parser.add_mutually_exclusive_group(required=False)
    s.add_argument("--group-id",
                   type=int,
                   action="store",
                   help="Target sensor group based on numeric ID.")
    s.add_argument("--hostname",
                   type=str,
                   action="store",
                   help="Target sensor matching hostname.")
    s.add_argument("--ipaddr",
                   type=str,
                   action="store",
                   help="Target sensor matching IP address (dotted quad).")

    # Options specific to this script
    parser.add_argument("--disable-smb1",
                        action="store_true",
                        help="If SMB1 is enabled, disable it.")

    args = parser.parse_args()

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    query_base = None
    if args.group_id:
        query_base = 'groupid:%s' % args.group_id
    elif args.hostname:
        query_base = 'hostname:%s' % args.hostname
    elif args.ipaddr:
        query_base = 'ipaddr:%s' % args.ipaddr

    process_sensors(cb,
                    query_base=query_base,
                    update=args.disable_smb1,
                    max_threads=args.max_threads,
                    debug=args.debug)
Code Example #5
def main():
    parser = build_cli_parser("USB utility")

    # Output options
    parser.add_argument("--timestamps",
                        action="store_true",
                        help="Include timestamps in results.")

    args = parser.parse_args()

    if args.queryfile:
        sys.exit("queryfile not supported in this utility")

    if args.prefix:
        output_filename = '%s-usbstor.csv' % args.prefix
    else:
        output_filename = 'usbstor.csv'

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    output_file = open(output_filename, 'w')
    writer = csv.writer(output_file, quoting=csv.QUOTE_ALL)

    header_row = ['endpoint', 'vendor', 'product', 'version', 'serial']
    if args.timestamps == True:
        header_row.insert(0, 'timestamp')
    writer.writerow(header_row)

    for term in search_terms:
        query = 'process_name:ntoskrnl.exe regmod:%s' % term

        if args.days:
            query += ' last_update:-%dm' % (args.days * 1440)
        elif args.minutes:
            query += ' last_update:-%dm' % args.minutes

        results = usbstor_search(cb,
                                 query,
                                 query_base=args.query,
                                 timestamps=args.timestamps)

        for row in results:
            if _python3 == False:
                row = [
                    col.encode('utf8') if isinstance(col, unicode) else col
                    for col in list(row)
                ]
            writer.writerow(row)

    output_file.close()
Code Example #6
def get_cb_response_object(args):
    if args.verbose:
        import logging
        logging.basicConfig()
        logging.getLogger("cbapi").setLevel(logging.DEBUG)
        logging.getLogger("__main__").setLevel(logging.DEBUG)

    if args.cburl and args.apitoken:
        cb = CbEnterpriseResponseAPI(args.cburl, args.apitoken)
    else:
        cb = CbEnterpriseResponseAPI(profile=args.profile)

    return cb
Code Example #7
def get_cb_response_object(args):
    if args.verbose:
        logging.basicConfig()
        logging.getLogger("cbapi").setLevel(logging.DEBUG)
        logging.getLogger("__main__").setLevel(logging.DEBUG)

    if args.cburl and args.apitoken:
        cb = CbEnterpriseResponseAPI(url=args.cburl,
                                     token=args.apitoken,
                                     ssl_verify=(not args.no_ssl_verify))
    else:
        cb = CbEnterpriseResponseAPI(profile=args.profile)

    return cb
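
# Hedged usage sketch (not part of the original excerpt): in cbapi's example
# scripts this helper is typically paired with cbapi.example_helpers'
# build_cli_parser, which defines the --cburl, --apitoken, --no-ssl-verify,
# --verbose and --profile arguments read above.
if __name__ == "__main__":
    from cbapi.example_helpers import build_cli_parser
    from cbapi.response import Sensor

    parser = build_cli_parser("Connectivity check")
    args = parser.parse_args()
    cb = get_cb_response_object(args)
    print(cb.select(Sensor).first())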
Code Example #8
import csv
import sys

from cbapi.response import CbEnterpriseResponseAPI, Sensor


def main():
    c = CbEnterpriseResponseAPI(profile="default")

    # The original excerpt assumes `wr` is a csv.writer created elsewhere; a
    # writer on stdout is used here so the snippet runs on its own.
    wr = csv.writer(sys.stdout)

    sensor = c.select(Sensor).first()
    field_list = sorted(sensor._info)
    wr.writerow(field_list)

    sensors = c.select(Sensor)
    for sensor in sensors:
        sensor_details = [getattr(sensor, field, "") for field in field_list]
        wr.writerow(sensor_details)
Code Example #9
def connect_callback(cb, line):
    try:
        sensor_id = int(line)
    except ValueError:
        sensor_id = None

    # Use the cb connection passed in by the caller rather than creating a
    # second API object here.
    if not sensor_id:
        q = cb.select(Sensor).where("hostname:{0}".format(line))
        sensor = q.first()
    else:
        sensor = cb.select(Sensor, sensor_id)

    return sensor
Code Example #10
    def connect(self, params):
        url = params.get("url")
        token = params.get("api_key").get("secretKey")
        ssl_verify = False

        self.logger.info("Connect: Connecting...")
        self.carbon_black = CbEnterpriseResponseAPI(url=url, token=token, ssl_verify=ssl_verify)
Code Example #11
    def connect(self, params=None):
        """Connect uses the Carbon Black credentials to get the latest API token for the user."""
        params = params or {}  # avoid a mutable default argument
        url = params.get('url')
        token = params.get('api_key').get('secretKey')
        ssl_verify = params.get('ssl_verify')

        try:
            self.carbon_black = CbEnterpriseResponseAPI(
                url=url, token=token, ssl_verify=ssl_verify,
                max_retries=2)  # Two retries to speed up a likely failure
        except UnauthorizedError as e:
            raise ConnectionTestException(
                preset=ConnectionTestException.Preset.API_KEY) from e
        except ApiError as e:
            raise ConnectionTestException(
                preset=ConnectionTestException.Preset.NOT_FOUND) from e
        else:
            self.connection_test_passed = True
Code Example #12
    while status != desired_status and time.time() - start_time < timeout:
        res = cb.get_object(url)
        if res["status"] == desired_status:
            return res
        elif res["status"] == "error":
            raise LiveResponseError(res)
        else:
            time.sleep(delay)

    raise TimeoutError(uri=url, message="timeout polling for Live Response")
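
# GetFileJob is used in the __main__ block below but is not defined in this
# excerpt. A minimal sketch of the job interface cbapi's live response
# scheduler expects: submit_job() is handed a callable (here, run) that
# receives an established live response session for the target sensor.
class GetFileJob(object):
    def __init__(self, file_name):
        self._file_name = file_name

    def run(self, session):
        # session is a LiveResponseSession bound to the target sensor; the
        # return value becomes the Future's result.
        return session.get_file(self._file_name)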


if __name__ == "__main__":
    from cbapi.response import CbEnterpriseResponseAPI, Sensor
    from concurrent.futures import wait  # submit_job returns a Future
    import logging
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())

    logging.getLogger("cbapi").setLevel(logging.DEBUG)

    c = CbEnterpriseResponseAPI()
    j = GetFileJob(r"c:\test.txt")
    with c.select(Sensor, 3).lr_session() as lr_session:
        file_contents = lr_session.get_file(r"c:\test.txt")

    future = c.live_response.submit_job(j.run, 3)
    wait([
        future,
    ])
    print(future.result())
Code Example #13
def main():

    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # parse arguments
    #
    parser = build_cli_parser("New Binaries with Netconns")
    parser.add_argument("-d", "--date-to-query", action="store", dest="date",
                      help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f", "--output-file", action="store", dest="output_file",
                        help="output file in csv format")

    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)
    #
    # Initialize the cbapi-ng
    # TODO get_cb_object
    #
    cb = CbEnterpriseResponseAPI()

    #
    # Main Query
    #
    start_date = "[" + opts.date + "T00:00:00 TO *]"
    binary_query = cb.select(Binary).where(("host_count:[1 TO 3]"
                                            " server_added_timestamp:" + start_date +
                                            " -observed_filename:*.dll"
                                            " -digsig_publisher:Microsoft*"
                                            " -alliance_score_srstrust:*"))
    #
    # Setup the csv writer
    #
    if not opts.output_file:
        output_file = open("new_binaries_with_netconns.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)
    #
    # Write out CSV header
    #
    csv_writer.writerow(("FileName", "Hostname", "Username", "Network Connections",
                         "Process Link", "Binary Link", "Binary MD5", "Signature Status", "Company",
                         "Observed Date", "Host Count", "Binary TimeStamp"))

    #
    # Create Progress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(binary_query)).start()

    for i, binary in enumerate(binary_query):

        #
        # Update progress bar
        #
        pbar.update(i + 1)

        #
        # Retrieve the binary timestamp
        #
        binary_timestamp = time.asctime(time.gmtime(pefile.PE(data=binary.file.read()).FILE_HEADER.TimeDateStamp))

        #
        # Build a sub query to see if this binary was executed and had netconns
        #
        sub_query = "process_md5:" + binary.md5 + " netconn_count:[1 TO *]"
        process_query = cb.select(Process).where(sub_query)

        #
        # Iterate through results
        #
        for process in process_query:

            #
            # Write out the result
            #
            csv_writer.writerow((process.path,
                                 process.hostname,
                                 process.username,
                                 process.netconn_count,
                                 process.webui_link,
                                 binary.webui_link,
                                 binary.md5,
                                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                                 binary.company_name,
                                 binary.server_added_timestamp,
                                 binary.host_count,
                                 binary_timestamp))
    pbar.finish()
Code Example #14
#%%
import time
import os
import codecs
from datetime import datetime, timedelta
import pandas as pd
import pprint
from cbapi.response import CbEnterpriseResponseAPI, Sensor, SensorGroup, Process

#%%
fields = ['process_name', 'cmdline', 'parent_name', 'modload_count','netconn_count',\
         'filemod_count','crossproc_count', 'childproc_count', 'group', 'hostname', \
         'last_update', 'start', 'id']

cb = CbEnterpriseResponseAPI()
query = cb.select(Process)
query = query.where(
    "process_name:lsass.exe AND (crossproc_type:processopentarget or crossproc_type:remotethreadtarget)"
)
query = query.group_by('id')
query = query.min_last_update(datetime.today() - timedelta(days=100))
print('running query')
results = []
crossprocs = []
for process in query:
    results.append(process)
    crossprocs = crossprocs + ([[process._info['process_name'],crossproc.source_path,\
                                crossproc.target_path, crossproc.type, crossproc.privileges,\
                                process._info['hostname'], process._info['id']]\
                                for crossproc in process.all_crossprocs() \
                                if crossproc.target_path.endswith('lsass.exe')])
Code Example #15
# This is a forceful deletion in that the script kills (.exe) processes in the pre-defined file or path before deleting.
# Could exit with an error if:
#                               - cb.exe is not running with the highest level of permission.
#                               - Another running .exe prevents an .exe from being deleted (self-defense or reliance)
#                               - A windows exception is thrown for any other rare reason.
#                               - The live-response session to the sensor has a timeout.
#
# File: Forcefully_Delete_Path_Or_File.py
# Date: 06/19/2017
# Author: Jared F

import time
import os
from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()

p = (
    r"C://Users//user//Desktop//MalwareFolder"
)  # Path to delete, r character before ensures slashes are treated correctly
sensors = c.select(
    Sensor,
    1)  # Here we define 1 or more sensors we want to delete a file / path on

s = sensors  # We'd use this if only checking one sensor
# for s in sensors:  # We'd use this if sensors was a list, not a single sensor

print("[INFO] Establishing session to CB Sensor #" + str(s.id))

try:
    session = c.live_response.request_session(s.id)
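    print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id))

    # Hedged sketch of the remainder (the original excerpt is truncated here):
    # walk the target path bottom-up, kill any running .exe that lives under
    # it, then delete each file. Illustrative only, not the original script's
    # continuation.
    for dirpath, dirnames, filenames in session.walk(p, False):  # False = bottom-up walk
        for fname in filenames:
            full_path = dirpath + '\\' + fname
            if fname.lower().endswith('.exe'):
                for proc in session.list_processes():
                    if proc.get('path', '').lower() == full_path.lower():
                        session.kill_process(proc['pid'])
            session.delete_file(full_path)
    session.close()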
Code Example #16
def main():
    """ Main function for standardization.
    """
    parser = cli.parser("Carbon Black Live Response Universal Triage Script.")
    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                            level=logging.DEBUG)
    else:
        logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
                            level=logging.INFO)

    if args.profile:
        cber = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cber = CbEnterpriseResponseAPI()

    triage.ROOT_DIR = args.profile.upper()
    if not os.path.exists(triage.ROOT_DIR):
        os.mkdir(triage.ROOT_DIR)

    TriageScript = cli.tdict[args.tscript]
    TriageScript.init_check()

    if not os.path.exists("{0}\\{1}".format(triage.ROOT_DIR,
                                            TriageScript.path)):
        os.mkdir("{0}\\{1}".format(triage.ROOT_DIR, TriageScript.path))

    jobs = []

    if args.sensorid:
        sensor = cber.select(Sensor, args.sensorid)
        call_triage(cber, TriageScript, sensor, jobs, args.sensorid, args.nics)

    elif args.hostname:
        sensor = cber.select(Sensor).where("hostname:{0}".format(
            args.hostname)).first()
        call_triage(cber, TriageScript, sensor, jobs, args.hostname, args.nics)

    elif args.ipaddress:
        sensor = cber.select(Sensor).where("ip:{0}".format(
            args.ipaddress)).first()
        call_triage(cber, TriageScript, sensor, jobs, args.ipaddress,
                    args.nics)

    elif args.allhosts:
        for sensor in cber.select(Sensor):
            call_triage(cber, TriageScript, sensor, jobs, sensor.id, args.nics)

    else:
        if not os.path.isfile(args.sensorlist):
            tprint("File not present - " + args.sensorlist)
            return 0

        csv_dict = csv.DictReader(open(args.sensorlist), delimiter=',')
        sid = check_csv_header(csv_dict, args)

        for row in csv_dict:
            if "id" in sid:
                sensor = cber.select(Sensor, row[sid])
            elif "computer_name" in sid:
                sensor = cber.select(Sensor).where("hostname:{0}".format(
                    row[sid])).first()
            elif "ip" in sid:
                sensor = cber.select(Sensor).where("ip:{0}".format(
                    row[sid])).first()
            else:
                return 0

            call_triage(cber, TriageScript, sensor, jobs, row[sid], args.nics)

    wait(jobs)
Code Example #17
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--profile", type=str, action="store",
                        help="The credentials.response profile to use.")

    # File output
    parser.add_argument("--prefix", type=str, action="store",
                        help="Output filename prefix.")

    # Cb Response Sensor query parameters
    s = parser.add_mutually_exclusive_group(required=False)
    s.add_argument("--group-id", type=int,  action="store",
                        help="Target sensor group based on numeric ID.")
    s.add_argument("--hostname", type=str,  action="store",
                        help="Target sensor matching hostname.")
    s.add_argument("--ip", type=str,  action="store",
                        help="Target sensor matching IP address (dotted quad).")

    # Health checking
    parser.add_argument("--process-count", action="store_true",
                        help="Count processes associated with this sensor.")
    parser.add_argument("--tamper-count", action="store_true",
                        help="Count tamper events associated with this sensor.")

    parser.add_argument("--checkin-ip", action="store_true",
                        help="Return the latest public IP associated with the sensor.")

    args = parser.parse_args()

    if args.prefix:
        output_filename = '%s-sensors.csv' % args.prefix
    else:
        output_filename = 'sensors.csv'

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    output_file = open(output_filename, 'w')
    writer = csv.writer(output_file, quoting=csv.QUOTE_ALL)

    header_row = ['computer_name', 
                  'computer_dns_name',
                  'sensor_group_id',
                  'os',
                  'os_type',
                  'computer_sid',
                  'last_checkin_time',
                  'registration_time',
                  'network_adapters',
                  'id',
                  'group_id',
                  'group_name',
                  'num_eventlog_mb',
                  'num_storefiles_mb',
                  'systemvolume_free_size',
                  'systemvolume_total_size',
                  'health',
                  'commit_charge_mb',
                  'build_version_string',
                  'process_count',
                  'tamper_count',
                  'clock_delta',
                  'checkin_ip']
    writer.writerow(header_row)

    query_base = None
    if args.group_id:
        query_base = 'groupid:{0}'.format(args.group_id)
    elif args.hostname:
        query_base = 'hostname:{0}'.format(args.hostname)
    elif args.ip:
        query_base = 'ip:{0}'.format(args.ip)

    if query_base is None:
        sensors = cb.select(Sensor)
    else:
        sensors = cb.select(Sensor).where(query_base)

    num_sensors = len(sensors)
    log_info("Found {0} sensors".format(num_sensors))

    counter = 1
    for sensor in sensors:
        if counter % 10 == 0:
            print("{0} of {1}".format(counter, num_sensors))

        if len(sensor.resource_status) > 0:
            commit_charge = "{0:.2f}".format(float(sensor.resource_status[0]['commit_charge'])/1024/1024)
        else:
            commit_charge = ''
        num_eventlog_mb = "{0:.2f}".format(float(sensor.num_eventlog_bytes)/1024/1024)
        num_storefiles_mb = "{0:.2f}".format(float(sensor.num_storefiles_bytes)/1024/1024)
        systemvolume_free_size = "{0:.2f}".format(float(sensor.systemvolume_free_size)/1024/1024)
        systemvolume_total_size = "{0:.2f}".format(float(sensor.systemvolume_total_size)/1024/1024)

        if args.process_count == True:
            process_count = len(cb.select(Process).where('sensor_id:{0}'.format(sensor.id)))
        else:
            process_count = ''

        if args.checkin_ip == True:
            try:
                checkin_ip = cb.select(Process).where('sensor_id:{0}'.format(sensor.id)).first().comms_ip
            except AttributeError:
                checkin_ip = ''
        else:
            checkin_ip = ''

        if args.tamper_count == True:
            tamper_count = len(cb.select(Process).where('tampered:true AND sensor_id:{0}'.format(sensor.id)))
        else:
            tamper_count = ''

        output_fields = [sensor.computer_name.lower(),
                         sensor.computer_dns_name.lower(),
                         sensor.group_id,
                         sensor.os,
                         sensor.os_type,
                         sensor.computer_sid,
                         sensor.last_checkin_time,
                         sensor.registration_time,
                         sensor.network_adapters,
                         sensor.id,
                         sensor.group_id,
                         sensor.group.name,
                         num_eventlog_mb,
                         num_storefiles_mb,
                         systemvolume_free_size,
                         systemvolume_total_size,
                         sensor.sensor_health_message,
                         commit_charge,
                         sensor.build_version_string,
                         process_count,
                         tamper_count,
                         sensor.clock_delta,
                         checkin_ip]

        if _python3 == False:
            row = [col.encode('utf8') if isinstance(col, unicode) else col for col in output_fields]
        else:
            row = output_fields
        writer.writerow(row)

        counter += 1

    output_file.close()
Code Example #18
def main():

    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # Parse arguments
    #
    parser = build_cli_parser("System Check After Specified Date")
    parser.add_argument("-d", "--date-to-query", action="store", dest="date",
                      help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f", "--output-file", action="store", dest="output_file",
                        help="output file in csv format")

    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)

    #
    # Setup cbapi-ng
    # TODO get_cb_object
    cb = CbEnterpriseResponseAPI()

    #
    # query for all processes that match our query
    #
    print "Performing Query..."
    query = "filewrite_md5:* last_update:[" + opts.date + "T00:00:00 TO *]"
    process_query = cb.select(Process).where(query)

    #
    # Create a set so we don't have duplicates
    #
    md5_list = set()

    #
    # Iterate through all the processs
    #
    for proc in process_query:
        #
        # Iterate through all the filemods
        #
        for fm in proc.filemods:
            #
            # if an md5 exists then save it to our set
            #
            if fm.md5:
                md5_list.add(fm.md5)

    #
    # Initialize Progress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()], maxval=len(md5_list)).start()

    #
    # CSV
    #
    if not opts.output_file:
        output_file = open("new_binaries_after_date.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)
    csv_writer.writerow(("Binary MD5", "Binary Link", "Signature Status", "Company",
                         "Observed Date", "Host Count", "Binary TimeStamp", "Number of Executions"))

    #
    # Iterate through our set
    #
    for i, md5 in enumerate(md5_list):

        pbar.update(i + 1)

        try:
            #
            # refresh our binary object with the CbER server
            # Note: this might cause an exception if the binary is not found
            #
            binary = cb.select(Binary, md5)
            if not binary:
                continue
            binary.refresh()

            #
            # Get the binary timestamp
            #
            binary_timestamp = time.asctime(time.gmtime(pefile.PE(data=binary.file.read()).FILE_HEADER.TimeDateStamp))
        except ObjectNotFoundError:
            pass
        else:

            #
            # Get the number of times executed by retrieving the number of search results
            #
            number_of_times_executed = len(cb.select(Process).where("process_md5:{0:s}".format(md5)))

            csv_writer.writerow((binary.md5,
                                 binary.webui_link,
                                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                                 binary.company_name,
                                 binary.server_added_timestamp,
                                 binary.host_count,
                                 binary_timestamp,
                                 number_of_times_executed))
    pbar.finish()
Code Example #19
#%%
import pandas as pd
from cbapi.response import CbEnterpriseResponseAPI, Sensor, SensorGroup, Process, Binary
#%%
cb = CbEnterpriseResponseAPI()

query = cb.select(Binary)
query = query.where('is_executable_image:true')
# This runs almost instantly in testing with 10,000 binaries on the server
bin_list = []

for binary in query:
    bin_list.append(binary._info)

bin_df = pd.DataFrame(bin_list)
bin_df = bin_df.explode('observed_filename')
#%%

grouped_by_name = bin_df.groupby('observed_filename')[
    ['md5', 'digsig_publisher', 'internal_name']].nunique()
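
#%%
# Hedged follow-up (not in the original excerpt): filenames observed with many
# distinct MD5s are usually the interesting slice of this pivot, e.g. possible
# masquerading or heavily repacked binaries.
suspicious = grouped_by_name.sort_values('md5', ascending=False)
print(suspicious.head(20))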
Code Example #20
# UserProfilesView.exe is freeware available at: http://www.nirsoft.net/utils/user_profiles_view.html
# HTML data file will contain user accounts on the remote sensor and light details about each.
# HTML data file will be named: SensorHostnameHere-UserAccountData.html
#
# Could exit with an error if:
#                               - A windows exception is thrown for any rare reason.
#                               - The live-response session to the sensor has a timeout.
#
# File: Get_User_Accounts.py
# Date: 06/20/2017
# Author: Jared F

import time
from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()
from distutils.version import LooseVersion

c.cb_server_version = LooseVersion('5.1.0')

sensors = c.select(
    Sensor, 1)  # Define sensor, could do sensors = c.select(Sensor) for all
upv_path = r"C:\Users\admin\Desktop\CBScripts"  # Where is local UserProfilesView.exe ?
save_path = r"C:\Users\admin\Desktop\CBScripts\dumps"  # Where to save user accounts data file returned ?

s = sensors  # We'd use this if only checking one sensor
# for s in sensors:  # We'd use this if sensors was a list, not a single sensor

print("[INFO] Establishing session to CB Sensor #" + str(s.id))

try:
Code Example #21
# UserProfilesView.exe is freeware available at: http://www.nirsoft.net/utils/user_profiles_view.html
# HTML data file will contain user accounts on the remote sensor and light details about each.
# HTML data file will be named: SensorHostnameHere-UserAccountData.html
#
# Could exit with an error if:
#                               - A windows exception is thrown for any rare reason.
#                               - The live-response session to the sensor has a timeout.
#
# File: Get_User_Accounts.py
# Date: 06/20/2017
# Author: Jared F

import time
from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()
from distutils.version import LooseVersion
c.cb_server_version = LooseVersion('5.1.0')

sensors = c.select(Sensor, 1)    # Define sensor, could do sensors = c.select(Sensor) for all
upv_path = r"C:\Users\admin\Desktop\CBScripts"  # Where is local UserProfilesView.exe ?
save_path = r"C:\Users\admin\Desktop\CBScripts\dumps"  # Where to save user accounts data file returned ?

s = sensors  # We'd use this if only checking one sensor
# for s in sensors:  # We'd use this if sensors was a list, not a single sensor

print("[INFO] Establishing session to CB Sensor #" + str(s.id))

try:
    session = c.live_response.request_session(s.id)
    print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id))
Code Example #22
# This python script will put a x86 or x64 executable on all Windows endpoints, run it, and return output if desired.
# This script uses threading to allow multiple concurrent processing.
#
# File: Install Mass Process and Run.py
# Date: 06/20/2018
# Author: Jared F, keithmccammon (RedCanaryCo)

import sys
import threading
from Queue import Queue
from time import sleep
from cbapi.response import CbEnterpriseResponseAPI, Sensor
from cbapi.errors import *

c = CbEnterpriseResponseAPI()

### ==========[START CONFIG VARIABLES]========== ###
main_query = c.select(Sensor).all()  # All endpoints
    # c.select(Sensor).where('groupid:1')  # One endpoint group only
    # c.select(Sensor).where('hostname:HostNameHere')  # One endpoint only
    # See 'Custom-exclusions can be added here' to add specific exclusions
log_name = 'Mass Install Log.txt'  # Script output will be directed to this log file
process_name_x86 = 'RunMe.exe'  # What is executable name for x86 (32-bit Operating System)?
process_name_x64 = 'RunMe.exe'  # What is executable name for x64 (64-bit Operating System)?
process_location_local = r'C:\Users\analyst\Desktop'  # Where are local executables listed above ?
process_location_remote = r'C:\Windows\CarbonBlack\Tools'  # Where to place executable on remote Sensor ?
process_args = ''  # Executable arguments to run, ENSURE a leading space if not empty!
wait_for_output_bool = False  # Wait for the process to output something before continuing.
wait_for_completion_bool = False  # Wait for process to complete before continuing.
process_run_timeout = 30  # Timeout for process in seconds, if reached the install/execute will be reattempted.
delete_process_after = False  # Should remote executable be deleted after execution?
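
# Hedged sketch (not part of the original excerpt) of a worker the Queue/thread
# setup above could drive: pull a sensor off the queue, push the matching
# executable over a Live Response session, run it, and optionally clean up.
def install_and_run(queue):
    while not queue.empty():
        sensor = queue.get()
        try:
            if 'online' not in sensor.status.lower():
                continue
            # Crude OS-bitness check on the display string; adjust as needed.
            exe = process_name_x64 if '64' in sensor.os_environment_display_string else process_name_x86
            remote_path = process_location_remote + '\\' + exe
            session = c.live_response.request_session(sensor.id)
            session.put_file(open(process_location_local + '\\' + exe, 'rb'), remote_path)
            output = session.create_process(remote_path + process_args,
                                            wait_for_output=wait_for_output_bool,
                                            wait_for_completion=wait_for_completion_bool,
                                            wait_timeout=process_run_timeout)
            if wait_for_output_bool and output:
                print(sensor.hostname + ': ' + str(output))
            if delete_process_after:
                session.delete_file(remote_path)
            session.close()
        except Exception as err:
            print('[ERROR] ' + sensor.hostname + ': ' + str(err))
        finally:
            queue.task_done()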
Code Example #23
# This python script will go through a CSV file and group hosts from one column into the group from an adjacent column.
#
# File: Regroup Sensors Using CSV.py
# Date: 09/22/2017 - Modified: 06/20/2018
# Author: Jared F

import csv
from cbapi.response import CbEnterpriseResponseAPI, Sensor, SensorGroup

c = CbEnterpriseResponseAPI()

CSV_file = r'C:\Users\analyst\Desktop\host-group.csv'  # Where is the CSV file?
HostnameColNum = 0  # What column are the host names in? Note: The first column is 0, not 1.
GroupNameColNum = 1  # What column are the new/correct group names in? Note: The first column is 0, not 1.

with open(CSV_file, 'rU') as csvfile:
    csvDialect = csv.Sniffer().sniff(csvfile.readline())
    csvfile.seek(0)
    csvfile = csv.reader(csvfile,
                         dialect=csvDialect,
                         delimiter=csvDialect.delimiter)
    for row in csvfile:
        if row[HostnameColNum] and row[GroupNameColNum]:
            # print ('[DEBUG] Row: ' + str(row))  # For debugging, prints the row out
            host_name = str(row[HostnameColNum]).lower().strip()
            group_name = (str(row[GroupNameColNum]).lower()).strip()

            if True is True:  # Add any exclusions here, if desired
                try:
                    group = c.select(SensorGroup).where(
                        'name:{0}'.format(group_name)).first()
Code Example #24
def main():

    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # parse arguments
    #
    parser = build_cli_parser("New Binaries with Netconns")
    parser.add_argument("-d",
                        "--date-to-query",
                        action="store",
                        dest="date",
                        help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f",
                        "--output-file",
                        action="store",
                        dest="output_file",
                        help="output file in csv format")

    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)
    #
    # Initialize the cbapi-ng
    # TODO get_cb_object
    #
    cb = CbEnterpriseResponseAPI()

    #
    # Main Query
    #
    start_date = "[" + opts.date + "T00:00:00 TO *]"
    binary_query = cb.select(Binary).where(
        ("host_count:[1 TO 3]"
         " server_added_timestamp:" + start_date + " -observed_filename:*.dll"
         " -digsig_publisher:Microsoft*"
         " -alliance_score_srstrust:*"))
    #
    # Setup the csv writer
    #
    if not opts.output_file:
        output_file = open("new_binaries_with_netconns.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)
    #
    # Write out CSV header
    #
    csv_writer.writerow(
        ("FileName", "Hostname", "Username", "Network Connections",
         "Process Link", "Binary Link", "Binary MD5", "Signature Status",
         "Company", "Observed Date", "Host Count", "Binary TimeStamp"))

    #
    # Create Progress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()],
                       maxval=len(binary_query)).start()

    for i, binary in enumerate(binary_query):

        #
        # Update progress bar
        #
        pbar.update(i + 1)

        #
        # Retrieve the binary timestamp
        #
        binary_timestamp = time.asctime(
            time.gmtime(
                pefile.PE(data=binary.file.read()).FILE_HEADER.TimeDateStamp))

        #
        # Build a sub query to see if this binary was executed and had netconns
        #
        sub_query = "process_md5:" + binary.md5 + " netconn_count:[1 TO *]"
        process_query = cb.select(Process).where(sub_query)

        #
        # Iterate through results
        #
        for process in process_query:

            #
            # Write out the result
            #
            csv_writer.writerow(
                (process.path, process.hostname, process.username,
                 process.netconn_count, process.webui_link, binary.webui_link,
                 binary.md5,
                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                 binary.company_name, binary.server_added_timestamp,
                 binary.host_count, binary_timestamp))
    pbar.finish()
Code Example #25
def main():
    parser = build_cli_parser("Timeline utility")

    # Output options
    output_events = parser.add_argument_group(
        'output_events',
        "If any output type is set, all other types will be suppressed unless they are explicitly set as well."
    )
    output_events.add_argument("--filemods",
                               action="store_true",
                               help="Output file modification records.")
    output_events.add_argument("--netconns",
                               action="store_true",
                               help="Output network connection records.")
    output_events.add_argument("--processes",
                               action="store_true",
                               help="Output process start records.")
    output_events.add_argument("--regmods",
                               action="store_true",
                               help="Output registry modification records.")

    args = parser.parse_args()

    if args.prefix:
        filename = '{0}-timeline.csv'.format(args.prefix)
    else:
        filename = 'timeline.csv'

    if args.append == True or args.queryfile is not None:
        file_mode = 'a'
    else:
        file_mode = 'w'

    if args.days:
        query_base = ' start:-{0}m'.format(args.days * 1440)
    elif args.minutes:
        query_base = ' start:-{0}m'.format(args.minutes)
    else:
        query_base = ''

    # This is horrible. All are False by default. If all are False, then set
    # all to True. If any are set to True, then evaluate each independently.
    # If you're reading this and know of a cleaner way to do this, ideally via
    # argparse foolery, by all means . . .
    if args.filemods == False and \
       args.netconns == False and \
       args.processes == False and \
       args.regmods == False:
        (filemods, netconns, processes, regmods) = (True, True, True, True)
    else:
        filemods = args.filemods
        netconns = args.netconns
        processes = args.processes
        regmods = args.regmods

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    queries = []
    if args.query:
        queries.append(args.query)
    elif args.queryfile:
        with open(args.queryfile, 'r') as f:
            for query in f:
                queries.append(query.strip())
    else:
        queries.append('')

    file = open(filename, file_mode)
    writer = csv.writer(file)
    writer.writerow([
        "event_type", "timestamp", "hostname", "username", "path", "cmdline",
        "process_md5", "parent", "childproc_count", "url", "netconn_domain",
        "netconn_remote_ip", "netconn_remote_port", "netconn_local_ip",
        "netconn_local_port", "netconn_proto", "netconn_direction",
        "filemod_path", "filemod_type", "filemod_md5", "regmod_path",
        "regmod_type"
    ])

    for query in queries:
        result_set = process_search(cb, query, query_base, filemods, netconns,
                                    processes, regmods)

        for row in result_set:
            if _python3 == False:
                row = [
                    col.encode('utf8') if isinstance(col, unicode) else col
                    for col in row
                ]
            writer.writerow(row)

    file.close()
Code Example #26
##	Copyright 2016 Jeff Rotenberger
##
## ----------------------------------------------------------------------------------------------------------------------------------------
##
##
## ----------------------------------------------------------------------------------------------------------------------------------------

## ----------------------------------------------------------------------------------------------------------------------------------------
## Set Target
## ----------------------------------------------------------------------------------------------------------------------------------------

import time

from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()

print("Enter Sensor ID")
name = input()
sensor_id = name
sensor = c.select(Sensor, sensor_id)

with sensor.lr_session() as session:  # this will wait until the Live Response session is established
    session.put_file(open("\\\\DIRECTORY\\artifactpullcb.ps1", "rb"),
                     "c:\\windows\\CarbonBlack\\artifactpullcb.ps1")
    session.create_process("PowerShell SET-EXECUTIONPOLICY UNRESTRICTED")
    output = session.create_process("PowerShell .\\artifactpullcb.ps1")
    session.create_process("PowerShell SET-EXECUTIONPOLICY RESTRICTED")
    time.sleep(1000)
    print output
Code Example #27
    while status != desired_status and time.time() - start_time < timeout:
        res = cb.get_object(url)
        if res["status"] == desired_status:
            log.debug(json.dumps(res))
            return res
        elif res["status"] == "error":
            raise LiveResponseError(res)
        else:
            time.sleep(delay)

    raise TimeoutError(uri=url, message="timeout polling for Live Response")


if __name__ == "__main__":
    from cbapi.response import CbEnterpriseResponseAPI, Sensor
    from concurrent.futures import wait  # submit_job returns a Future
    import logging
    root = logging.getLogger()
    root.addHandler(logging.StreamHandler())

    logging.getLogger("cbapi").setLevel(logging.DEBUG)

    c = CbEnterpriseResponseAPI()
    j = GetFileJob(r"c:\test.txt")
    with c.select(Sensor, 3).lr_session() as lr_session:
        file_contents = lr_session.get_file(r"c:\test.txt")

    future = c.live_response.submit_job(j.run, 3)
    wait([future, ])
    print(future.result())
Code Example #28
def main():
    parser = build_cli_parser("Network utility")

    # Non-exclusive query terms. Note that we're passing them this way because
    # we can't risk the user passing terms that Cb can't search (i.e., a
    # process-level term plus an event-level term). These are joined by AND,
    # not OR.
    parser.add_argument("--hostname",
                        type=str,
                        action="store",
                        help="Search for hostname")
    parser.add_argument("--username",
                        type=str,
                        action="store",
                        help="Search for username")

    # Whitelist conditions
    parser.add_argument("--whitelist",
                        type=str,
                        action="store",
                        help="Path to whitelist file.")
    parser.add_argument(
        "--ignore-hosts",
        type=str,
        action="store",
        help="Path to file listing IPs to ignore traffic to/from.")
    parser.add_argument("--noloopback",
                        action="store_false",
                        help="Ignore connections to and from 127.0.0.1.")
    parser.add_argument("--nomulticast",
                        action="store_false",
                        help="Ignore multicast connections")
    parser.add_argument("--ignore-private-dest",
                        action="store_true",
                        help="Ignore connections to RFC1918 networks.")

    # Traffic attributes
    d = parser.add_mutually_exclusive_group(required=False)
    d.add_argument("--inbound",
                   action="store_true",
                   help="Report only inbound netconns.")
    d.add_argument('--outbound',
                   action="store_true",
                   help="Report only outbound netconns.")

    p = parser.add_mutually_exclusive_group(required=False)
    p.add_argument("--tcp",
                   action="store_true",
                   help="Report only UDP netconns.")
    p.add_argument('--udp',
                   action="store_true",
                   help="Report only TCP netconns.")

    # Endpoint attributes
    t = parser.add_mutually_exclusive_group(required=False)
    t.add_argument("--workstations",
                   action="store_true",
                   help="Only process workstations.")
    t.add_argument("--servers",
                   action="store_true",
                   help="Only process servers.")

    # Query and inspection limiting
    parser.add_argument(
        "--inspect-limit",
        dest="inspect_limit",
        type=int,
        action="store",
        default="5000",
        help="Limit netconns per process that we inspect (default: 5000.")

    # Shortcuts for speed
    parser.add_argument(
        "--domain",
        dest="domain",
        action="store",
        help="Quick search for only those events with a domain match.")
    parser.add_argument(
        "--port",
        dest="port",
        action="store",
        help="Quick search for only those events involving a specific port.")
    parser.add_argument(
        "--ipaddr",
        dest="ipaddr",
        action="store",
        help="Quick search for only those events with an IP match.")

    args = parser.parse_args()

    if args.prefix:
        output_filename = '%s-netconns.csv' % args.prefix
    else:
        output_filename = 'netconns.csv'

    if args.append == True or args.queryfile is not None:
        file_mode = 'a'
    else:
        file_mode = 'w'

    # Query buildup
    if args.days:
        query_base = ' start:-%dm' % (args.days * 1440)
    elif args.minutes:
        query_base = ' start:-%dm' % args.minutes
    else:
        query_base = ''

    if args.servers:
        query_base += ' (host_type:"domain_controller" OR host_type:"server")'
    elif args.workstations:
        query_base += ' host_type:"workstation"'

    if args.hostname:
        query_base += ' hostname:{0}'.format(args.hostname)
    if args.username:
        query_base += ' username:{0}'.format(args.username)

    if args.whitelist:
        query_base += build_whitelist(args.whitelist)

    ignore_hosts = None
    if args.ignore_hosts:
        ignore_hosts = get_hosts(args.ignore_hosts)

    if args.domain:
        query_base += ' domain:%s' % args.domain
    elif args.ipaddr:
        query_base += ' ipaddr:%s' % args.ipaddr
    elif args.port:
        query_base += ' ipport:%s' % args.port
    else:
        query_base += ' netconn_count:[1 TO *]'

    if args.inbound:
        direction = 'Inbound'
    elif args.outbound:
        direction = 'Outbound'
    else:
        direction = None

    udp = True
    tcp = True
    if args.tcp and not args.udp:
        udp = False
    elif args.udp and not args.tcp:
        tcp = False

    # Connect and stage queries
    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    # TODO - Update this routine to guard against impossible queries.
    queries = []
    if args.query:
        queries.append(args.query)
    elif args.queryfile:
        with open(args.queryfile, 'r') as f:
            for query in f:
                if ':' in query:
                    queries.append(query.strip())
    else:
        queries.append('')

    # Main routine and output
    output_file = open(output_filename, file_mode)
    writer = csv.writer(output_file)
    if args.append is False:
        writer.writerow([
            "timestamp", "path", "hostname", "username", "domain", "proto",
            "direction", "local_ip", "local_port", "remote_ip", "remote_port"
        ])

    for query in queries:
        result_set = process_search(
            cb,
            query,
            query_base,
            limit=args.inspect_limit,
            direction=direction,
            loopback=args.noloopback,
            ignore_hosts=ignore_hosts,
            ignore_private_dest=args.ignore_private_dest,
            multicast=args.nomulticast,
            tcp=tcp,
            udp=udp,
            domain=args.domain)

        for r in result_set:
            row = list(r)
            if _python3 == False:
                row = [
                    col.encode('utf8') if isinstance(col, unicode) else col
                    for col in row
                ]
            writer.writerow(row)

    output_file.close()
Code Example #29
# This python script will iterate through the list of sensors on the CB Server and return basic details of each sensor.
# Output is comma-separated and can be used for simple CSV creation.
# Reflects all sensors, reported by the 'Total Sensor Count' in the CB Server Dashboard.
#

# File: Forcefully Delete Path or File.py
# Date: 06/20/2017 - Modified: 06/15/2018
# Author: Jared F

from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()
sensors = c.select(Sensor).all()

ids = []

print('[INFO] Detecting all sensors known to CB Server...\n')
print('Sensor ID,Hostname,Status,OS,Group ID,Group,IP,MAC,Last Seen')

for s in sensors:

    ip_listing = ''
    mac_listing = ''

    for x in s.network_interfaces:  # Get all network interfaces ever seen from the host (ips/macs)

        try: ip_listing = ip_listing + (str(x).split("ipaddr=u'")[1].split("'")[0]) + ' '
        except Exception as err: print err

        try: mac_listing = mac_listing + (str(x).split("macaddr=u'")[1].split("'")[0]) + ' '
        except Exception as err: print err
Code Example #30
# This python script will iterate through the list of sensors on the CB Server and return basic details of each sensor.
# Output is comma-separated and can be used for simple CSV creation.
# Reflects all sensors, reported by the 'Total Sensor Count' in the CB Server Dashboard.
#

# File: Forcefully Delete Path or File.py
# Date: 06/20/2017 - Modified: 06/15/2018
# Author: Jared F

from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()
sensors = c.select(Sensor).all()

ids = []

print('[INFO] Detecting all sensors known to CB Server...\n')
print('Sensor ID,Hostname,Status,OS,Group ID,Group,IP,MAC,Last Seen')

for s in sensors:

    ip_listing = ''
    mac_listing = ''

    for x in s.network_interfaces:  # Get all network interfaces ever seen from the host (ips/macs)

        try:
            ip_listing = ip_listing + (
                str(x).split("ipaddr=u'")[1].split("'")[0]) + ' '
        except Exception as err:
            print err
Code Example #31
# This python script will iterate through the list of sensors on the CB Server and return basic details of each sensor.
# Some sensors returned are no longer licensed and active. Output reflects "Total Sensor Count" sensors in CB dashboard.
#
# File: List_All_Sensors.py
# Date: 06/20/2017
# Author: Jared F

from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()
from distutils.version import LooseVersion
c.cb_server_version = LooseVersion('5.1.0')
# Using legacy 5.1.0 due to sensor iteration bug.
# See bug report: github.com/carbonblack/cbapi-python/issues/67

sensors = c.select(Sensor).all()

ids = []

print("[INFO] Detecting all sensors known to CB Server...\n")
for s in sensors:

    sep = ','  # Comma separates the IP from the MAC in network_adapters string, there might be a cleaner way to do this, but it works
    ip = str(s.network_adapters).split(
        sep, 1)[0]  # Removes MAC address from network_adapters string
    print("Sensor ID: " + str(s.id) + " | Status: " + s.status + " | OS: " +
          s.os_environment_display_string + " | Group: " + s.group.name +
          " | Hostname: " + s.hostname + " | IP: " + ip + " | Last Seen: " +
          str(s.last_checkin_time))
    if s._model_unique_id not in ids:  # May re-loop certain ID's for unknown reason, this protects against repetition. Legacy 5.1.0 should also help protect against this.
        ids.append(s._model_unique_id)
Code Example #32
#
#
# File: AlienVault_IOC_Processor.py
# Date: 07/17/2017
# Author: Jared F

import csv
import os
from six import PY3
import subprocess
from cbapi.errors import ObjectNotFoundError
from cbapi.response import CbEnterpriseResponseAPI, Process, Binary
from cbapi.response.models import CbChildProcEvent, CbFileModEvent, CbNetConnEvent, CbRegModEvent, CbModLoadEvent, CbCrossProcEvent
from urlparse import urlparse

c = CbEnterpriseResponseAPI()

IOC_file_path = r'C:\Users\YourAccount\Desktop\iocs'  # What is the AlienVault IOC output path? Must match OutputPath from AlienVault_IOC_Getter.py

IOC_report_path = r'C:\Users\YourAccount\Desktop\AlienVault_OTX_IOC_Report.csv'  # What is the IOC result file path? Must be a CSV

global ips
global domains
global md5s
global paths

global IOCCol
global Hits  # Total IOC hits count


# UnicodeWriter class from http://python3porting.com/problems.html
Code Example #33
#

# Could exit with an error if:
#                               - A windows exception is thrown for any other rare reason.
#                               - The live-response session to the sensor has a timeout.
#
# File: Pull_Cb_Logs.py
# Date: 07/14/2017
# Author: Jared F

import time
import os

from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()

sensor = c.select(Sensor, 1)
hst = sensor.hostname

p = (r"C:\Windows\CarbonBlack")  # Path to retrieve log files from
save_path = r"C:\Users\YourAccount\Desktop\{0}".format(hst)  # Where to save retrieved log files, {0} is the hostname

p = os.path.normpath(p)  # Ensures proper OS path syntax
save_path = os.path.normpath(save_path)  # Ensures proper OS path syntax

extensions_to_grab = [".txt", ".log", ".dump", ".dmp", ".tmp", ".db", ".html", "catalog"]

print("[INFO] Establishing session to CB Sensor #" + str(sensor.id))
try:
    session = c.live_response.request_session(sensor.id)
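
    # Hedged sketch of the remainder (the original excerpt is truncated here):
    # walk the CarbonBlack directory, pull every file whose name ends with one
    # of the extensions above, and write it under save_path locally.
    if not os.path.exists(save_path):
        os.makedirs(save_path)
    for dirpath, dirnames, filenames in session.walk(p):
        for fname in filenames:
            if any(fname.lower().endswith(ext) for ext in extensions_to_grab):
                contents = session.get_file(dirpath + '\\' + fname)
                with open(os.path.join(save_path, fname), 'wb') as out:
                    out.write(contents)
    session.close()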
Code Example #34
# This python script will run the batch script created by Lawrence Abrams on each Windows endpoint.
# This batch file deploys a vaccine against the NotPetya Ransomware/Wiper attack.
# The batch file can be found at: https://download.bleepingcomputer.com/bats/nopetyavac.bat
#
# File: NotPetyaVaccine.py
# Date: 06/30/2017
# Author: Jared F

import time
from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()
from distutils.version import LooseVersion
c.cb_server_version = LooseVersion('5.1.0')
# Legacy 5.1.0 used due to bug with how paginated queries are returned in latest version.

sensors = c.select(Sensor).all()
batch_path = r"C:\Users\YourAccount\Desktop" # Where is local nopetyavac.bat on your PC?

SensorsAwaitingVaccine = []

for sens in sensors:
    if 'windows' in sens.os_environment_display_string.lower():
        if 'Uninstall' not in sens.status.lower():
            SensorsAwaitingVaccine.append(sens)  # We're creating a list of all installed Windows sensors

print("[INFO] " + str(len(SensorsAwaitingVaccine)) + " sensors will be vaccinated against the NotPetya Ransomware/Wiper...")

while len(SensorsAwaitingVaccine) != 0:  # We're going to loop over these indefinitely until all have the vaccine
    for s in SensorsAwaitingVaccine:
        if 'online' in s.status.lower():
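            # Hedged sketch of the remainder (the original excerpt is truncated
            # here): push nopetyavac.bat to the sensor, run it, and drop the
            # sensor from the waiting list. Illustrative only.
            try:
                session = c.live_response.request_session(s.id)
                remote_bat = r'C:\Windows\CarbonBlack\nopetyavac.bat'
                session.put_file(open(batch_path + r'\nopetyavac.bat', 'rb'), remote_bat)
                session.create_process('cmd.exe /c ' + remote_bat)
                session.close()
                print('[SUCCESS] Vaccinated ' + s.hostname)
                SensorsAwaitingVaccine.remove(s)
                break  # restart the for loop after mutating the list
            except Exception as err:
                print('[ERROR] ' + s.hostname + ': ' + str(err))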
Code Example #35



from cbapi.response import CbEnterpriseResponseAPI, Sensor

cb = CbEnterpriseResponseAPI()

print "Enter hostname (ALL CAPS):"

MY_HOSTNAME = raw_input()

sensor = cb.select(Sensor).where("hostname:"+ MY_HOSTNAME).first()
if not sensor:
    print "no sensor"
else:
    print sensor.id



Code Example #36
## ----------------------------------------------------------------------------------------------------------------------------------------
## CbR KAPE Pull Script
## ----------------------------------------------------------------------------------------------------------------------------------------
import time
import requests
from cbapi.response import CbEnterpriseResponseAPI, Sensor

ApiToken = "apikey"
c = CbEnterpriseResponseAPI(url="url", token=ApiToken, ssl_verify=False)

script_path = r'x:\scriptpath'  # Where is local Kape.ps1 script?

print('Enter Sensor ID:')
sensor_id = input()
session_id = ""

try:
    # Connect to CbR and establish live response session
    sensor = c.select(Sensor, sensor_id)
    print('[INFO] Establishing session to CB Sensor #' + str(sensor.id) + '(' + sensor.hostname + ')')

    session = c.live_response.request_session(sensor.id)
    print('[SUCCESS] Connected on Session #' + str(session.session_id))
    session_id = session.session_id
    
    try: session.create_directory('C:\Windows\CarbonBlack\Tools')
    except Exception: pass  # Existed already

    # Transfer scripts to live response host
    print('[INFO] Transferring script files to CB Sensor #' + sensor_id)
    try: session.put_file(open(script_path + '\Kape.zip', 'rb'), 'C:\Windows\CarbonBlack\Tools\Kape.zip')
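    # --- Hedged completion sketch (not part of the original snippet) ---
    # The example above is cut off mid-transfer. The Kape.ps1 name, the PowerShell
    # invocation, and the kape_output.zip archive name below are illustrative
    # assumptions, not taken from the original script.
    except Exception as err: print('[ERROR] Could not transfer Kape.zip: ' + str(err))
    try: session.put_file(open(script_path + '\\Kape.ps1', 'rb'), r'C:\Windows\CarbonBlack\Tools\Kape.ps1')
    except Exception as err: print('[ERROR] Could not transfer Kape.ps1: ' + str(err))

    # Run the collection script and wait for it to finish (generous timeout for large pulls)
    print('[INFO] Running KAPE collection on CB Sensor #' + sensor_id)
    session.create_process(r'powershell.exe -ExecutionPolicy Bypass -File C:\Windows\CarbonBlack\Tools\Kape.ps1',
                           wait_for_completion=True, wait_timeout=3600)

    # Pull the resulting archive back to the analyst workstation
    with open('kape_output_{0}.zip'.format(sensor.hostname), 'wb') as out:
        out.write(session.get_file(r'C:\Windows\CarbonBlack\Tools\kape_output.zip'))
    session.close()
    print('[SUCCESS] KAPE output retrieved from ' + sensor.hostname)
except Exception as err:
    print('[ERROR] ' + str(err))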
コード例 #37
0
# This python script will go through a CSV file and group hosts from one column into the group from an adjacent column.
#
# File: Regroup Sensors Using CSV.py
# Date: 09/22/2017 - Modified: 06/20/2018
# Author: Jared F

import csv
from cbapi.response import CbEnterpriseResponseAPI, Sensor, SensorGroup

c = CbEnterpriseResponseAPI()

CSV_file = r'C:\Users\analyst\Desktop\host-group.csv'  # Where is the CSV file?
HostnameColNum = 0  # What column are the host names in? Note: The first column is 0, not 1.
GroupNameColNum = 1  # What column are the new/correct group names in? Note: The first column is 0, not 1.

with open(CSV_file, 'rU') as csvfile:
    csvDialect = csv.Sniffer().sniff(csvfile.readline())
    csvfile.seek(0)
    csvfile = csv.reader(csvfile, dialect=csvDialect, delimiter=csvDialect.delimiter)
    for row in csvfile:
        if row[HostnameColNum] and row[GroupNameColNum]:
            # print ('[DEBUG] Row: ' + str(row))  # For debugging, prints the row out
            host_name = str(row[HostnameColNum]).lower().strip()
            group_name = (str(row[GroupNameColNum]).lower()).strip()

            if True is True:  # Add any exclusions here, if desired
                try:
                    group = c.select(SensorGroup).where('name:{0}'.format(group_name)).first()
                    host = c.select(Sensor).where('hostname:{0}'.format(host_name)).first()
                    if group and host:  # If both are valid
                        old_group_name = str(host.group.name)
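                        # --- Hedged completion sketch (not part of the original snippet) ---
                        # The example above is cut off right after the current group name
                        # is captured. Assuming the cbapi Sensor model allows the group to
                        # be reassigned and saved (sensor.group = ... followed by
                        # sensor.save()), the move itself might look roughly like this:
                        if old_group_name.lower() != group_name:
                            host.group = group  # Point the sensor at the new sensor group
                            host.save()         # Push the change to the CbR server
                            print('[INFO] Moved ' + host_name + ' from "' + old_group_name + '" to "' + group.name + '"')
                        else:
                            print('[INFO] ' + host_name + ' is already in "' + group.name + '"')
                    else:
                        print('[WARN] Could not find sensor or group for row: ' + str(row))
                except Exception as err:
                    print('[ERROR] ' + host_name + ': ' + str(err))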
コード例 #38
0
# This is a forceful deletion in that the script kills (.exe) processes in the pre-defined file or path before deleting.
# Could exit with an error if:
#                               - cb.exe is not running with the highest level of permission.
#                               - Another running .exe prevents an .exe from being deleted (self-defense or reliance)
#                               - A windows exception is thrown for any other rare reason.
#                               - The live-response session to the sensor has a timeout.
#
# File: Forcefully_Delete_Path_Or_File.py
# Date: 06/19/2017
# Author: Jared F

import time
import os
from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()

p = (r"C://Users//user//Desktop//MalwareFolder")  # Path to delete, r character before ensures slashes are treated correctly
sensors = c.select(Sensor, 1)  # Here we define 1 or more sensors we want to delete a file / path on

s = sensors  # We'd use this if only checking one sensor
# for s in sensors:  # We'd use this if sensors was a list, not a single sensor

print("[INFO] Establishing session to CB Sensor #" + str(s.id))

try:
    session = c.live_response.request_session(s.id)
    print("[SUCCESS] Connected to CB Sensor on Session #" + str(session.session_id))
    
    path = session.walk(p, False)  # Walk the path; False walks bottom-up instead of top-down
    exes = []
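    # --- Hedged completion sketch (not part of the original snippet) ---
    # The example above is cut off right after the walk begins. Assuming session.walk
    # yields (path, dirs, files) tuples like os.walk, session.list_processes() returns
    # dicts with 'pid' and 'path' keys, and kill_process/delete_file behave as
    # documented, the forceful delete might continue roughly like this:
    files_to_delete = []
    for dirpath, dirnames, filenames in path:
        for name in filenames:
            files_to_delete.append(dirpath + '\\' + name)
            if name.lower().endswith('.exe'):
                exes.append(name.lower())  # Remember executables so we can kill them first

    for proc in session.list_processes():
        if proc['path'].split('\\')[-1].lower() in exes:
            print("[INFO] Killing PID " + str(proc['pid']) + " (" + proc['path'] + ")")
            session.kill_process(proc['pid'])

    for f in files_to_delete:
        print("[INFO] Deleting " + f)
        session.delete_file(f)
    session.close()
except Exception as err:
    print("[ERROR] " + str(err))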
コード例 #39
0
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument("--prefix",
                        type=str,
                        action="store",
                        help="Output filename prefix.")
    parser.add_argument("--profile",
                        type=str,
                        action="store",
                        help="The credentials.response profile to use.")

    # Time boundaries for the survey
    parser.add_argument("--days",
                        type=int,
                        action="store",
                        help="Number of days to search.")
    parser.add_argument("--minutes",
                        type=int,
                        action="store",
                        help="Number of days to search.")

    # Survey criteria
    i = parser.add_mutually_exclusive_group(required=True)
    i.add_argument('--deffile',
                   type=str,
                   action="store",
                   help="Definition file to process (must end in .json).")
    i.add_argument('--defdir',
                   type=str,
                   action="store",
                   help="Directory containing multiple definition files.")
    i.add_argument('--query',
                   type=str,
                   action="store",
                   help="A single Cb query to execute.")
    i.add_argument(
        '--iocfile',
        type=str,
        action="store",
        help="IOC file to process. One IOC per line. REQUIRES --ioctype")
    parser.add_argument('--hostname',
                        type=str,
                        action="store",
                        help="Target specific host by name.")
    parser.add_argument('--username',
                        type=str,
                        action="store",
                        help="Target specific username.")
    parser.add_argument('--group',
                        type=str,
                        action="store",
                        help="Target specific sensor group")

    # IOC survey criteria
    parser.add_argument('--ioctype',
                        type=str,
                        action="store",
                        help="One of: ipaddr, domain, md5")

    args = parser.parse_args()

    if (args.iocfile is not None and args.ioctype is None):
        parser.error('--iocfile requires --ioctype')

    if args.prefix:
        output_filename = '%s-survey.csv' % args.prefix
    else:
        output_filename = 'survey.csv'

    query_base = ''
    if args.days:
        query_base += ' start:-%dm' % (args.days * 1440)
    elif args.minutes:
        query_base += ' start:-%dm' % args.minutes

    if args.hostname:
        if args.query and 'hostname' in args.query:
            parser.error('Cannot use --hostname with "hostname:" (in query)')
        query_base += ' hostname:%s' % args.hostname

    if args.username:
        if args.query and 'username' in args.query:
            parser.error('Cannot use --username with "username:" (in query)')
        query_base += ' username:%s' % args.username

    if args.group:
        if args.query and 'group' in args.query:
            parser.error('Cannot use --group with "group:" (in query)')
        query_base += ' group:%s' % args.group

    definition_files = []
    if args.deffile:
        if not os.path.exists(args.deffile):
            err('deffile does not exist')
            sys.exit(1)
        definition_files.append(args.deffile)
    elif args.defdir:
        if not os.path.exists(args.defdir):
            err('defdir does not exist')
            sys.exit(1)
        for root, dirs, files in os.walk(args.defdir):
            for filename in files:
                if filename.endswith('.json'):
                    definition_files.append(os.path.join(root, filename))

    if _python3:
        output_file = open(output_filename, 'w', newline='')
    else:
        output_file = open(output_filename, 'wb')
    writer = csv.writer(output_file)
    writer.writerow([
        "endpoint", "username", "process_path", "cmdline", "program", "source"
    ])

    if args.profile:
        cb = CbEnterpriseResponseAPI(profile=args.profile)
    else:
        cb = CbEnterpriseResponseAPI()

    if args.query:
        result_set = process_search(cb, args.query, query_base)

        for r in result_set:
            row = [r[0], r[1], r[2], r[3], args.query, 'query']
            if _python3 == False:
                row = [
                    col.encode('utf8') if isinstance(col, unicode) else col
                    for col in row
                ]
            writer.writerow(row)
    elif args.iocfile:
        with open(args.iocfile) as iocfile:
            data = iocfile.readlines()
            for ioc in data:
                ioc = ioc.strip()
                query = '%s:%s' % (args.ioctype, ioc)
                result_set = process_search(cb, query, query_base)

                for r in result_set:
                    row = [r[0], r[1], r[2], r[3], ioc, 'ioc']
                    if _python3 == False:
                        row = [
                            col.encode('utf8')
                            if isinstance(col, unicode) else col for col in row
                        ]
                    writer.writerow(row)
    else:
        for definition_file in definition_files:
            log("Processing definition file: %s" % definition_file)
            basename = os.path.basename(definition_file)
            source = os.path.splitext(basename)[0]

            with open(definition_file, 'r') as fh:
                programs = json.load(fh)

            for program, criteria in programs.items():
                log("--> %s" % program)

                result_set = nested_process_search(cb, criteria, query_base)

                for r in result_set:
                    row = [r[0], r[1], r[2], r[3], program, source]
                    if _python3 == False:
                        row = [
                            col.encode('utf8')
                            if isinstance(col, unicode) else col for col in row
                        ]
                    writer.writerow(row)

    output_file.close()
コード例 #40
0
def main():

    dir_path = os.path.abspath(os.path.dirname(__file__))

    print(
        "Carbon Black Live Response - Retrieving User Folder Directory Listing "
    )

    strScriptName = "DirListing.ps1"
    strComputerName = sys.argv[1]
    strUserName = sys.argv[2]

    cb = CbEnterpriseResponseAPI()

    sensor = cb.select(Sensor).where("hostname:" + strComputerName).first()

    if not sensor:
        print("Computer not found!")
    elif sensor.status == "Offline":
        print("")
        print("Computer", strComputerName, "is", sensor.status)
        print("")
        os.system("pause")
    else:
        print("")
        print("Computer Name:", strComputerName)
        print("")
        print("Sensor ID:", sensor.id)
        print("")

        sensor_id = sensor.id

        sensor = cb.select(Sensor, sensor_id)

        with sensor.lr_session() as session:
            # Create a folder under C:\Windows\CarbonBlack named ExportedFiles
            path = "C:\\Windows\\CarbonBlack\\ExportedFiles"
            try:
                print(
                    "Creating direcotry: C:\Windows\CarbonBlack\ExportedFiles")
                session.create_directory(path)
            except Exception as e:
                print("  Directory already exists: %s" % e)

# Upload Script to the folder
            print(" ")
            print(("Uploading {0}....").format(strScriptName))
            print(" ")
            binary = r'{0}'.format(strScriptName)
            with open(binary, 'rb') as filedata:
                try:
                    session.put_file(
                        filedata.read(),
                        "C:\\Windows\\CarbonBlack\\ExportedFiles\\" + binary)
                except Exception as e:
                    session.close()


# Run PowerShell Script
            command = r'PowerShell.exe -nologo -file C:\Windows\CarbonBlack\ExportedFiles\{0} {1}'.format(
                strScriptName, strUserName)
            print("Executing: '{0}' ".format(command))
            session.create_process(command,
                                   wait_timeout=900,
                                   wait_for_completion=True)

            # Download .CSV file
            FileLocation = r'C:\Windows\CarbonBlack\ExportedFiles'
            filename = "DirectoryListing.csv"
            print(" ")
            print("Downloading DirectoryListing.csv....")
            open(
                "{0}\Output\{2}_{1}".format(dir_path, filename,
                                            strComputerName),
                "wb").write(
                    session.get_file(FileLocation + "\{0}".format(filename)))

            # Sleep for 15 sec before removing folder
            print(" ")
            print("Sleep 15 sec after Downloading .CSV file")
            time.sleep(15)

            # Remove Folder on remote computer
            command = r'cmd /c rmdir C:\Windows\CarbonBlack\ExportedFiles /s /q'
            print("Executing: '{0}' ".format(command))
            session.create_process(command,
                                   wait_timeout=30,
                                   wait_for_completion=True)

        session.close()
コード例 #41
0
def main():

    #
    # Disable requests insecure warnings
    #
    disable_insecure_warnings()

    #
    # Parse arguments
    #
    parser = build_cli_parser("System Check After Specified Date")
    parser.add_argument("-d",
                        "--date-to-query",
                        action="store",
                        dest="date",
                        help="New since DATE, format YYYY-MM-DD")
    parser.add_argument("-f",
                        "--output-file",
                        action="store",
                        dest="output_file",
                        help="output file in csv format")

    opts = parser.parse_args()
    if not opts.date:
        parser.print_usage()
        sys.exit(-1)

    #
    # Setup cbapi-ng
    # TODO get_cb_object
    cb = CbEnterpriseResponseAPI()

    #
    # query for all processes that match our query
    #
    print("Performing Query...")
    query = "filewrite_md5:* last_update:[" + opts.date + "T00:00:00 TO *]"
    process_query = cb.select(Process).where(query)

    #
    # Create a set so we don't have duplicates
    #
    md5_list = set()

    #
    # Iterate through all the processes
    #
    for proc in process_query:
        #
        # Iterate through all the filemods
        #
        for fm in proc.filemods:
            #
            # if an md5 exists then save it to our set
            #
            if fm.md5:
                md5_list.add(fm.md5)

    #
    # Initialize Progress Bar
    #
    pbar = ProgressBar(widgets=[Percentage(), Bar()],
                       maxval=len(md5_list)).start()

    #
    # CSV
    #
    if not opts.output_file:
        output_file = open("new_binaries_after_date.csv", 'wb')
    else:
        output_file = open(opts.output_file, 'wb')
    csv_writer = csv.writer(output_file)
    csv_writer.writerow(("Binary MD5", "Binary Link", "Signature Status",
                         "Company", "Observed Date", "Host Count",
                         "Binary TimeStamp", "Number of Executions"))

    #
    # Iterate through our set
    #
    for i, md5 in enumerate(md5_list):

        pbar.update(i + 1)

        try:
            #
            # refresh our binary object with the CbER server
            # Note: this might cause an exception if the binary is not found
            #
            binary = cb.select(Binary, md5)
            if not binary:
                continue
            binary.refresh()

            #
            # Get the binary timestamp
            #
            binary_timestamp = time.asctime(
                time.gmtime(
                    pefile.PE(
                        data=binary.file.read()).FILE_HEADER.TimeDateStamp))
        except ObjectNotFoundError:
            pass
        else:

            #
            # Get the number of times executed by retrieving the number of search results
            #
            number_of_times_executed = len(
                cb.select(Process).where("process_md5:{0:s}".format(md5)))

            csv_writer.writerow(
                (binary.md5, binary.webui_link,
                 binary.digsig_result if binary.digsig_result else "UNSIGNED",
                 binary.company_name, binary.server_added_timestamp,
                 binary.host_count, binary_timestamp,
                 number_of_times_executed))
    pbar.finish()
コード例 #42
0
# This python script will retrieve all finished Windows AV scan events.
# Windows Defender and Microsoft Security Client (Microsoft Antimalware) finished scan events will be separated if both exist.
# The script will also retrieve the last time a full scan was completed and print it to the console.
#
# File: "Retrieve AV Scan Events.py"
# Date: 09/01/2017 - Modified: 01/24/2019
# Authors: Jared F

import datetime
from cbapi.response import CbEnterpriseResponseAPI, Sensor

c = CbEnterpriseResponseAPI()

save_path = r'C:\Users\analyst\Desktop'  # Locally saves All_AV_Events.txt here
save_to_path = ''  # Required, leave as a blank string

print('Enter Sensor ID:')
sensor_id = raw_input()
# sensor_id = 150  # Use this to define the sensor ID in the script, rather than using input

try:
    sensor = c.select(Sensor, sensor_id)
    print('[INFO] Establishing session to CB Sensor #' + str(sensor.id) + '(' + sensor.hostname + ')')
    session = c.live_response.request_session(sensor.id)
    print("[SUCCESS] Connected on Session #" + str(session.session_id))

    try: session.create_directory('C:\Windows\CarbonBlack\Reports')
    except Exception: pass  # Existed already

    session.create_process(r'''cmd.exe /c wevtutil qe "System" /rd:True /q:"*[System[Provider[@Name='Microsoft Antimalware'] and (EventID=1001)]]" /f:Text > C:\Windows\CarbonBlack\Reports\Antimalware_Scan_Events.txt''', True)
    session.create_process(r'''cmd.exe /c wevtutil qe "Microsoft-Windows-Windows Defender/Operational" /rd:True /q:*[System[(EventID=1001)]] /f:Text > C:\Windows\CarbonBlack\Reports\Defender_Scan_Events.txt''', True)
コード例 #43
0
def main():
    c = CbEnterpriseResponseAPI(profile="default")
    sensors = c.select(Sensor)
    for sensor in sensors:
        print sensor.computer_name, sensor.id, sensor.status, sensor.physical_memory_size, sensor.computer_dns_name, sensor.next_checkin_time
コード例 #44
0
    #script to run - must be in PSScripts dir
    script='PSScripts/get-autoruns.ps1'

    #encoding of the tools output. cp1252 is the windows default
    # default can be checked with [System.Text.Encoding]::Default in powershell
    code='cp1252'
    #dir to write the files to
    output_dir='powershelloutput_'+Group
    #extension to append on hostnames for file output
    output_ext='_psautoruns.csv'

    if not os.path.exists(output_dir):
        os.mkdir(output_dir)

    group=cb.select(SensorGroup).where("name:"+Group).first()
    print(group)
    futures = []
    for sensor in group.sensors:
        # def __init__(self, HostName, ToolName='', Commandline='',code='UTF-8', OutputDir='Results', OutputExtension='.csv', remove=True, use_existing=False):
        job=RunRemotely(sensor.hostname,script,code=code,OutputDir=output_dir,OutputExtension=output_ext)
        print(sensor.hostname)
        futures.append(cb.live_response.submit_job(job.Run, sensor))
        print('job submitted')
    wait(futures)
#%%
if __name__ == '__main__':
    
    cb=CbEnterpriseResponseAPI()
    RunPowershell(cb, "X80")
    combine_count_autoruns(r'Q:/CBAutomation-master/powershelloutput_X87', 'combined_psautoruns.xlsx', '_psautoruns.csv')
コード例 #45
0
#
# File: ProcessIOCs.py
# Date: 07/14/2017
# Author: Jared F

import csv
import os
import sys
from six import PY3
from cbapi.errors import ObjectNotFoundError
from cbapi.response import CbEnterpriseResponseAPI, Process, Binary, BannedHash
from cbapi.response.models import CbChildProcEvent, CbFileModEvent, CbNetConnEvent, CbRegModEvent, CbModLoadEvent, CbCrossProcEvent
from cbapi.errors import ServerError, TimeoutError
from urlparse import urlparse

c = CbEnterpriseResponseAPI()

IOC_file = r"C:\Users\YourAccount\Desktop\IOC_Data_List.csv"  # Where is the IOC list data file? Must be a CSV
IOC_report = r"C:\Users\YourAccount\Desktop\IOC_Report.csv"  # Where should IOC result file be stored? Must be a CSV

# These are for what the script might encounter as types in the IOC list data file. Add as needed.
ipType = [
    "ip", "ipv4", "address", "ip address", "ipv4 address", "ip v4 address",
    "ip address v4"
]
domainType = ["domain", "hostname", "url", "uri", "website", "site"]
md5Type = [
    "md5", "message-digest algorithm v5", "message digest algorithm v5",
    "message digest v5", "filehash-md5", "hash"
]
pathType = ["file", "filepath", "file path", "path", "location", "process"]