Example #1
    def test_util_000_date_parsing(self):
        default_format = '2018-10-19 14:06:34 +0000'
        old_default_format = '2018-10-19 14:06:34'
        json_format = '2018-10-19T18:08:08.346118-05:00'
        old_json_format = '2018-10-19T18:08:08.346118'
        splunk_format = '2015-02-19T09:50:49.000-05:00'

        result = parse_event_time(default_format)
        self.assertEquals(result.year, 2018)
        self.assertEquals(result.month, 10)
        self.assertEquals(result.day, 19)
        self.assertEquals(result.hour, 14)
        self.assertEquals(result.minute, 6)
        self.assertEquals(result.second, 34)
        self.assertIsNotNone(result.tzinfo)
        self.assertEquals(int(result.tzinfo.utcoffset(None).total_seconds()), 0)

        result = parse_event_time(old_default_format)
        self.assertEquals(result.year, 2018)
        self.assertEquals(result.month, 10)
        self.assertEquals(result.day, 19)
        self.assertEquals(result.hour, 14)
        self.assertEquals(result.minute, 6)
        self.assertEquals(result.second, 34)
        self.assertIsNotNone(result.tzinfo)
        self.assertEquals(saq.LOCAL_TIMEZONE.tzname, result.tzinfo.tzname)
        
        result = parse_event_time(json_format)
        self.assertEquals(result.year, 2018)
        self.assertEquals(result.month, 10)
        self.assertEquals(result.day, 19)
        self.assertEquals(result.hour, 18)
        self.assertEquals(result.minute, 8)
        self.assertEquals(result.second, 8)
        self.assertIsNotNone(result.tzinfo)
        self.assertEquals(int(result.tzinfo.utcoffset(None).total_seconds()), -(5 * 60 * 60))

        result = parse_event_time(old_json_format)
        self.assertEquals(result.year, 2018)
        self.assertEquals(result.month, 10)
        self.assertEquals(result.day, 19)
        self.assertEquals(result.hour, 18)
        self.assertEquals(result.minute, 8)
        self.assertEquals(result.second, 8)
        self.assertIsNotNone(result.tzinfo)
        self.assertEquals(saq.LOCAL_TIMEZONE.tzname, result.tzinfo.tzname)

        result = parse_event_time(splunk_format)
        self.assertEquals(result.year, 2015)
        self.assertEquals(result.month, 2)
        self.assertEquals(result.day, 19)
        self.assertEquals(result.hour, 9)
        self.assertEquals(result.minute, 50)
        self.assertEquals(result.second, 49)
        self.assertIsNotNone(result.tzinfo)
        self.assertEquals(int(result.tzinfo.utcoffset(None).total_seconds()), -(5 * 60 * 60))
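
The formats exercised above imply a simple try-each-format parser. The following is only a minimal sketch of that technique, not the actual parse_event_time from the codebase; the format list and the pytz-based localization of naive values are assumptions drawn from this test.

import datetime
import pytz  # assumed; the test above uses saq.LOCAL_TIMEZONE, a pytz timezone

# candidate formats, in the order the test exercises them
# note: %z accepts offsets containing colons on Python 3.7+
_EVENT_TIME_FORMATS = [
    '%Y-%m-%d %H:%M:%S %z',      # default format, e.g. '2018-10-19 14:06:34 +0000'
    '%Y-%m-%d %H:%M:%S',         # old default format (naive)
    '%Y-%m-%dT%H:%M:%S.%f%z',    # json/splunk format, e.g. '2018-10-19T18:08:08.346118-05:00'
    '%Y-%m-%dT%H:%M:%S.%f',      # old json format (naive)
]

def parse_event_time_sketch(value, local_tz=pytz.utc):
    """Try each known format; localize naive results to local_tz (illustrative only)."""
    for fmt in _EVENT_TIME_FORMATS:
        try:
            result = datetime.datetime.strptime(value, fmt)
        except ValueError:
            continue
        if result.tzinfo is None:
            result = local_tz.localize(result)
        return result
    raise ValueError("unsupported event time format: {}".format(value))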
Example #2
    def execute_analysis(self, ipv4_fc):
        
        target_time = ipv4_fc.time if ipv4_fc.time else self.root.event_time

        # source -> dest (dest_port)
        source_dest_json = None
        self.splunk_query(f"""index=carbonblack event_type=netconn local_ip={ipv4_fc.source} remote_ip={ipv4_fc.dest} remote_port={ipv4_fc.dest_port} | fields *""", target_time)
        if self.search_results is not None:
            source_dest_json = self.json()

        # dest -> source (src_port)
        dest_source_json = None
        self.splunk_query(f"""index=carbonblack event_type=netconn local_ip={ipv4_fc.dest} remote_ip={ipv4_fc.source} remote_port={ipv4_fc.source_port} | fields *""", target_time)
        if self.search_results is not None:
            dest_source_json = self.json()

        if source_dest_json is None and dest_source_json is None:
            return False

        analysis = self.create_analysis(ipv4_fc)
        if source_dest_json is not None:
            analysis.details.extend(source_dest_json)
        if dest_source_json is not None:
            analysis.details.extend(dest_source_json)

        # add a process guid observable (with its event time) for each process in the results,
        # up to the configured limit
        procs = [(p['process_guid'], parse_event_time(p['_time'])) for p in analysis.details if 'process_guid' in p]
        for process_guid, event_time in procs[:self.process_guid_limit]:
            analysis.add_observable(F_PROCESS_GUID, process_guid, event_time)

        return True
Example #3
def submit():

    if KEY_ANALYSIS not in request.values:
        abort(
            Response(
                "missing {} field (see documentation)".format(KEY_ANALYSIS),
                400))

    r = json.loads(request.values[KEY_ANALYSIS])

    # the specified company needs to match the company of this node
    # TODO eventually we'll have a single node that serves API to all configured companies

    if KEY_COMPANY_NAME in r and r[KEY_COMPANY_NAME] != saq.CONFIG['global']['company_name']:
        abort(Response("wrong company {} (are you sending to the correct system?)".format(
            r[KEY_COMPANY_NAME]), 400))

    if KEY_DESCRIPTION not in r:
        abort(
            Response("missing {} field in submission".format(KEY_DESCRIPTION),
                     400))

    root = RootAnalysis()
    root.uuid = str(uuid.uuid4())

    # does the engine use a different drive for the workload?
    analysis_mode = r.get(KEY_ANALYSIS_MODE, saq.CONFIG['engine']['default_analysis_mode'])
    if analysis_mode != ANALYSIS_MODE_CORRELATION:
        root.storage_dir = workload_storage_dir(root.uuid)
    else:
        root.storage_dir = storage_dir_from_uuid(root.uuid)

    root.initialize_storage()

    try:

        root.analysis_mode = r.get(KEY_ANALYSIS_MODE, saq.CONFIG['engine']['default_analysis_mode'])
        root.company_id = saq.CONFIG['global'].getint('company_id')
        root.tool = r.get(KEY_TOOL, 'api')
        root.tool_instance = r.get(KEY_TOOL_INSTANCE, 'api({})'.format(request.remote_addr))
        root.alert_type = r.get(KEY_TYPE, saq.CONFIG['api']['default_alert_type'])
        root.description = r[KEY_DESCRIPTION]
        root.event_time = LOCAL_TIMEZONE.localize(datetime.datetime.now())
        if KEY_EVENT_TIME in r:
            try:
                root.event_time = parse_event_time(r[KEY_EVENT_TIME])
            except ValueError as e:
                abort(Response("invalid event time format for {} (use {} format)".format(
                    r[KEY_EVENT_TIME], event_time_format_json_tz), 400))

        root.details = r[KEY_DETAILS] if KEY_DETAILS in r else {}

        # go ahead and allocate storage
        # XXX use temp dir instead...

        if KEY_TAGS in r:
            for tag in r[KEY_TAGS]:
                root.add_tag(tag)

        # add the observables
        if KEY_OBSERVABLES in r:
            for o in r[KEY_OBSERVABLES]:
                # check for required fields
                for field in [KEY_O_TYPE, KEY_O_VALUE]:
                    if field not in o:
                        abort(
                            Response(
                                "an observable is missing the {} field".format(
                                    field), 400))

                o_type = o[KEY_O_TYPE]
                o_value = o[KEY_O_VALUE]
                o_time = None
                if KEY_O_TIME in o:
                    try:
                        o_time = parse_event_time(o[KEY_O_TIME])
                    except ValueError:
                        abort(
                            Response(
                                "an observable has an invalid time format {} (use {} format)"
                                .format(o[KEY_O_TIME],
                                        event_time_format_json_tz), 400))

                observable = root.add_observable(o_type,
                                                 o_value,
                                                 o_time=o_time)

                if KEY_O_TAGS in o:
                    for tag in o[KEY_O_TAGS]:
                        observable.add_tag(tag)

                if KEY_O_DIRECTIVES in o:
                    for directive in o[KEY_O_DIRECTIVES]:
                        # is this a valid directive?
                        if directive not in VALID_DIRECTIVES:
                            abort(Response(
                                "observable {}:{} has invalid directive {} (choose from {})".format(
                                    o_type, o_value, directive, ','.join(VALID_DIRECTIVES)), 400))

                        observable.add_directive(directive)

                if KEY_O_LIMITED_ANALYSIS in o:
                    for module_name in o[KEY_O_LIMITED_ANALYSIS]:
                        observable.limit_analysis(module_name)

        # save the files to disk and add them as observables of type file
        for f in request.files.getlist('file'):
            logging.debug("recording file {}".format(f.filename))
            #temp_dir = tempfile.mkdtemp(dir=saq.CONFIG.get('api', 'incoming_dir'))
            #_path = os.path.join(temp_dir, secure_filename(f.filename))
            try:
                #if os.path.exists(_path):
                #logging.error("duplicate file name {}".format(_path))
                #abort(400)

                #logging.debug("saving file to {}".format(_path))
                #try:
                #f.save(_path)
                #except Exception as e:
                #logging.error("unable to save file to {}: {}".format(_path, e))
                #abort(400)

                full_path = os.path.join(root.storage_dir, f.filename)

                try:
                    dest_dir = os.path.dirname(full_path)
                    if not os.path.isdir(dest_dir):
                        try:
                            os.makedirs(dest_dir)
                        except Exception as e:
                            logging.error(
                                "unable to create directory {}: {}".format(
                                    dest_dir, e))
                            abort(400)

                    logging.debug("saving file {}".format(full_path))
                    f.save(full_path)

                    # add this as a F_FILE type observable
                    root.add_observable(
                        F_FILE,
                        os.path.relpath(full_path, start=root.storage_dir))

                except Exception as e:
                    logging.error("unable to save file {} to {} for root {}: {}".format(
                        f.filename, full_path, root, e))
                    abort(400)

            except Exception as e:
                logging.error("unable to deal with file {}: {}".format(f, e))
                report_exception()
                abort(400)

            #finally:
            #try:
            #shutil.rmtree(temp_dir)
            #except Exception as e:
            #logging.error("unable to delete temp dir {}: {}".format(temp_dir, e))

        try:
            if not root.save():
                logging.error("unable to save analysis")
                abort(
                    Response(
                        "an error occured trying to save the alert - review the logs",
                        400))

            # if we received a submission for correlation mode then we go ahead and add it to the database
            if root.analysis_mode == ANALYSIS_MODE_CORRELATION:
                ALERT(root)

            # add this analysis to the workload
            root.schedule()

        except Exception as e:
            logging.error("unable to sync to database: {}".format(e))
            report_exception()
            abort(
                Response(
                    "an error occured trying to save the alert - review the logs",
                    400))

        return json_result({'result': {'uuid': root.uuid}})

    except Exception as e:
        logging.error("error processing submit: {}".format(e))
        report_exception()

        try:
            if os.path.isdir(root.storage_dir):
                logging.info("removing failed submit dir {}".format(
                    root.storage_dir))
                shutil.rmtree(root.storage_dir)
        except Exception as e2:
            logging.error("unable to delete failed submit dir {}: {}".format(
                root.storage_dir, e2))

        raise e
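
For context, submit() expects the analysis JSON in the 'analysis' form field and any attachments under 'file', which is exactly what the unit test in Example #6 exercises. The snippet below is a hypothetical client-side sketch of that contract; the endpoint URL, host, and the 'ipv4' observable type string are assumptions, not values confirmed by this code.

import json
import requests  # assumed available for this client-side illustration

submission = {
    'analysis_mode': 'analysis',
    'tool': 'example_tool',
    'tool_instance': 'example_instance',
    'type': 'example',
    'description': 'example submission',
    'event_time': '2018-10-19T18:08:08.346118-0500',
    'details': {'hello': 'world'},
    'observables': [
        {'type': 'ipv4', 'value': '1.2.3.4'},   # 'ipv4' assumed to be the F_IPV4 string value
    ],
    'tags': ['example_tag'],
}

response = requests.post(
    'https://ace.example.com/api/analysis/submit',   # hypothetical endpoint
    data={'analysis': json.dumps(submission)},
    files=[('file', ('sample.dat', b'Hello, world!'))])
print(response.json())   # on success: {'result': {'uuid': '...'}}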
Example #4
def splunktime_to_saqtime(splunk_time):
    """Convert a splunk time in 2015-02-19T09:50:49.000-05:00 format to SAQ time format YYYY-MM-DD HH:MM:SS."""
    assert isinstance(splunk_time, str)
    return parse_event_time(splunk_time).strftime(event_time_format_json_tz)
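
Illustrative usage, assuming event_time_format_json_tz is '%Y-%m-%dT%H:%M:%S.%f%z' (consistent with the expected value '2017-11-11T07:36:01.000001+0000' in the API test in Example #6):

# splunktime_to_saqtime('2015-02-19T09:50:49.000-05:00')
# -> '2015-02-19T09:50:49.000000-0500'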
Example #5
def splunktime_to_datetime(splunk_time):
    """Convert a splunk time in 2015-02-19T09:50:49.000-05:00 format to a datetime object."""
    assert isinstance(splunk_time, str)
    #return datetime.datetime.strptime(splunk_time.split('.')[0], '%Y-%m-%dT%H:%M:%S')
    return parse_event_time(splunk_time)
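
A quick usage note: per the splunk_format case in Example #1, the returned datetime is timezone-aware and carries the -05:00 offset:

# dt = splunktime_to_datetime('2015-02-19T09:50:49.000-05:00')
# dt.year, dt.hour, int(dt.utcoffset().total_seconds())
# -> (2015, 9, -18000)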
Example #6
    def test_api_analysis_submit(self, db, c):
        t = saq.LOCAL_TIMEZONE.localize(
            datetime.datetime(2017, 11, 11, hour=7, minute=36, second=1, microsecond=1)
        ).astimezone(pytz.UTC).strftime(event_time_format_json_tz)
        result = self.client.post(
            url_for('analysis.submit'),
            data={
                'analysis': json.dumps({
                    'analysis_mode': 'analysis',
                    'tool': 'unittest',
                    'tool_instance': 'unittest_instance',
                    'type': 'unittest',
                    'description': 'testing',
                    'event_time': t,
                    'details': {'hello': 'world'},
                    'observables': [
                        {'type': F_IPV4,
                         'value': '1.2.3.4',
                         'time': t,
                         'tags': ['tag_1', 'tag_2'],
                         'directives': [DIRECTIVE_NO_SCAN],
                         'limited_analysis': ['basic_test']},
                        {'type': F_USER,
                         'value': 'test_user',
                         'time': t},
                    ],
                    'tags': ['alert_tag_1', 'alert_tag_2'],
                }, cls=_JSONEncoder),
                'file': (io.BytesIO(b'Hello, world!'), 'sample.dat'),
            },
            content_type='multipart/form-data')

        result = result.get_json()
        self.assertIsNotNone(result)

        self.assertTrue('result' in result)
        result = result['result']
        self.assertIsNotNone(result['uuid'])
        #self.assertIsNotNone(result['id'])

        uuid = result['uuid']
        #_id = result['id']

        result = self.client.get(url_for('analysis.get_analysis', uuid=uuid))
        result = result.get_json()
        self.assertIsNotNone(result)
        self.assertTrue('result' in result)
        result = result['result']

        self.assertEquals(result['analysis_mode'], 'analysis')
        self.assertEquals(result['tool'], 'unittest')
        self.assertEquals(result['tool_instance'], 'unittest_instance')
        self.assertEquals(result['type'], 'unittest')
        self.assertEquals(result['description'], 'testing')
        self.assertEquals(result['event_time'],
                          '2017-11-11T07:36:01.000001+0000')
        self.assertEquals(result['tags'][0], 'alert_tag_1')
        self.assertEquals(result['tags'][1], 'alert_tag_2')
        self.assertEquals(len(result['observable_store']), 3)

        file_uuid = None

        for o_uuid in result['observable_store']:
            o = result['observable_store'][o_uuid]
            if o['type'] == F_IPV4:
                self.assertEquals(o['type'], F_IPV4)
                self.assertEquals(o['value'], '1.2.3.4')
                self.assertEquals(o['time'], '2017-11-11T07:36:01.000001+0000')
                self.assertEquals(o['tags'][0], 'tag_1')
                self.assertEquals(o['tags'][1], 'tag_2')
                self.assertEquals(o['directives'][0], DIRECTIVE_NO_SCAN)
                self.assertEquals(o['limited_analysis'][0], 'basic_test')
            elif o['type'] == F_USER:
                self.assertEquals(o['type'], F_USER)
                self.assertEquals(o['value'], 'test_user')
                self.assertEquals(o['time'], '2017-11-11T07:36:01.000001+0000')
            elif o['type'] == F_FILE:
                self.assertEquals(o['type'], F_FILE)
                self.assertEquals(o['value'], 'sample.dat')
                self.assertIsNone(o['time'])
                self.assertIsNotNone(o['id'])
                file_uuid = o['id']

        # we should see a single workload entry
        c.execute(
            "SELECT id, uuid, node_id, analysis_mode FROM workload WHERE uuid = %s",
            (uuid, ))
        row = c.fetchone()
        self.assertIsNotNone(row)
        self.assertIsNotNone(row[0])
        self.assertEquals(row[1], uuid)
        self.assertEquals(row[2], saq.SAQ_NODE_ID)
        self.assertEquals(row[3], 'analysis')

        result = self.client.get(
            url_for('analysis.get_details',
                    uuid=uuid,
                    name=result['details']['file_path']))
        result = result.get_json()
        self.assertIsNotNone(result)
        result = result['result']
        self.assertTrue('hello' in result)
        self.assertEquals(result['hello'], 'world')

        result = self.client.get(
            url_for('analysis.get_file',
                    uuid=uuid,
                    file_uuid_or_name=file_uuid))
        self.assertEquals(result.status_code, 200)
        self.assertEquals(result.data, b'Hello, world!')

        result = self.client.get(
            url_for('analysis.get_file',
                    uuid=uuid,
                    file_uuid_or_name='sample.dat'))
        self.assertEquals(result.status_code, 200)
        self.assertEquals(result.data, b'Hello, world!')

        result = self.client.get(url_for('analysis.get_status', uuid=uuid))
        self.assertEquals(result.status_code, 200)
        result = result.get_json()
        self.assertIsNotNone(result)
        result = result['result']
        self.assertTrue('workload' in result)
        self.assertTrue('delayed_analysis' in result)
        self.assertTrue('locks' in result)
        self.assertEquals(result['delayed_analysis'], [])
        self.assertIsNone(result['locks'])
        self.assertTrue(isinstance(result['workload']['id'], int))
        self.assertEquals(result['workload']['uuid'], uuid)
        self.assertEquals(result['workload']['node_id'], saq.SAQ_NODE_ID)
        self.assertEquals(result['workload']['analysis_mode'], 'analysis')
        self.assertTrue(
            isinstance(parse_event_time(result['workload']['insert_date']),
                       datetime.datetime))