Code example #1
    def __init__(self, **args):
        super(OpsecFetcherAdhoc, self).__init__(**args)

        ip_path = homing.home_join("storage/col/opsec_fetcher", self.device_ip)
        #Change directory to the ip of the server because the certificate files are present there
        os.chdir(ip_path)
        #The path of loggrabber utils
        utils_path = homing.home_join("installed/system/apps/opsec_tools")
        #The executor script to export logs
        self.loggrabber_executor = os.path.join(utils_path, "fw1-loggrabber")
        #The configuration to be used with -c
        self.loggrabber_conf = os.path.join(utils_path, "fw1-loggrabber-offline.conf")
        #The configuration file to be used with -l
        self.lea_conf_file = os.path.join(ip_path, "lea.conf")
        #convert starttime and endtime to YYMMDDHHMMSS format from "2013-11-21 18:45:00" format
        self.convert_time()
Code example #2
File: make_zip.py  Project: laxmi518/network_project
def create_zipped_application_packages(basedir):

    basedir = basedir.replace('$LOGINSPECT_HOME', homing.LOGINSPECT_HOME)
    if os.path.exists(basedir):
        shutil.rmtree(basedir)

    disk.prepare_path(basedir)

    applications = []
    apps_path = homing.home_join('storage/col/fileinspect_applications/')
    for path in os.listdir(apps_path):
        if os.path.isdir(os.path.join(apps_path, path)):
            applications.append(path)

    for dirname, subdirs, files in os.walk(apps_path):
        for f in files:
            if f.endswith(".pyc"):
                os.unlink(os.path.join(dirname, f))

    for app in applications:
        outfilename = os.path.join(basedir, '%s.fi' % app)
        zf = zipfile.PyZipFile(outfilename, mode='w')
        try:
            zf.writepy(os.path.join(apps_path, app))
        finally:
            zf.close()
    return
Code example #3
def main():
    config = _parse_args()
    zmq_context = zmq.Context()

    db_file = homing.home_join('storage/col/filesystem_collector', 'checksums.shelve')
    disk.prepare_path(db_file)

    cursor_shelve = shelve.open(db_file, protocol=2)
    watcher.monitor(config, cursor_shelve, zmq_context)
Code example #4
def fetch_job(sid, config, opsecfetcher_out):
    col_type = config['col_type']
    device_name = config['client_map'][sid]['device_name']
    collected_at = config['loginspect_name']

    ip = config['client_map'][sid]['lea_server_ip']
    client_dn = config['client_map'][sid]['client_dn']
    server_dn = config['client_map'][sid]['server_dn']
    normalizer = config['client_map'][sid]['normalizer']
    repo = config['client_map'][sid]['repo']

    mem_file = get_memory_file(ip)
    #fetches fw logs from lea server using the lea.conf file specified
    try:
        loc, starttime = get_loc_starttime_if_exists(mem_file)
        from_beginning = False
    except:
        loc = -1
        from_beginning = True
    os.chdir(os.path.join(storage_path, ip))
    loggrabber_path = homing.home_join('installed/col/apps/opsec_fetcher/utils')
    loggrabber = os.path.join(loggrabber_path, 'fw1-loggrabber')
    loggrabber_conf = os.path.join(loggrabber_path, 'fw1-loggrabber.conf')
    lea_conf_file = get_config_file_path(ip, client_dn, server_dn)

    if from_beginning:
        proc = subprocess.Popen([loggrabber, '-l', lea_conf_file, \
                                             '-c', loggrabber_conf], \
                                 stdout=subprocess.PIPE
                                 )

    else:
        proc = subprocess.Popen([loggrabber, '-l', lea_conf_file, \
                                             '-c', loggrabber_conf, \
                                             '--filter', 'starttime=%s' % starttime], \
                                stdout=subprocess.PIPE
                                )

    t_loc, t_time = None, None
    while True:
        line = proc.stdout.readline()
        if not line:
            break

        data, t_loc, t_time = _make_ready(line, from_beginning, loc)
        if data['msg']:
            _handle_data(data, col_type, ip, device_name, collected_at, opsecfetcher_out, normalizer, repo)

#    for each in proc.stdout:
#        data, t_loc, t_time = _make_ready(each, from_beginning, loc)
#        if data['msg']:
#            _handle_data(data, col_type, ip, device_name, collected_at, opsecfetcher_out)

    if t_loc is None or t_time is None:  #means no data in this fetch
        return
    dump_loc_time_to_file(mem_file, t_loc, t_time)
Code example #5
    def __init__(self, config, sid, opsecfetcher_out):
        """
        """
        self.col_type = config["col_type"]
        self.collected_at = config["loginspect_name"]

        self.set_fields(sid, config["client_map"][sid])
        self.opsecfetcher_out = opsecfetcher_out


        ip_path = homing.home_join("storage/col/opsec_fetcher", self.device_ip)
        #Change directory to the ip of the server because the certificate files are present there
        os.chdir(ip_path)
        #The path of loggrabber utils
        utils_path = homing.home_join("installed/system/apps/opsec_tools")
        #The executor script to export logs
        self.loggrabber_executor = os.path.join(utils_path, "fw1-loggrabber")
        #The configuration to be used with -c
        self.loggrabber_conf = os.path.join(utils_path, "fw1-loggrabber-online.conf")
        #The configuration file to be used with -l
        self.lea_conf_file = os.path.join(ip_path, "lea.conf")
Code example #6
def _get_conf_file_path(ip, opsec_sic_name, lea_server_opsec_entity_sic_name):
    #make a new lea.conf file and return its path
    config_path = homing.home_join('storage/col/opsec_fetcher/', ip,
                                   'lea.conf')
    disk.prepare_path(config_path)
    with open(config_path, 'w') as conf_file:
        conf_file.write("lea_server auth_type ssl_opsec\n")
        conf_file.write("lea_server ip %s\n" % ip)
        conf_file.write("lea_server auth_port 18184\n")
        conf_file.write("opsec_sic_name %s\n" % opsec_sic_name)
        conf_file.write("opsec_sslca_file %s\n" % os.path.abspath("lea.conf"))
        conf_file.write("lea_server_opsec_entity_sic_name %s" %
                        lea_server_opsec_entity_sic_name)

    return config_path
Code example #7
def test_opsec_fetcher(ip, object_name, sic_one_timer_password, secret_key,
                       opsec_sic_name, lea_server_opsec_entity_sic_name):
    if not is_certificate_present(ip):
        result = generate_certificate(ip, object_name, sic_one_timer_password,
                                      secret_key)
        if not result['success']:
            return {'success': False, 'ip': ip, 'msg': result['msg']}

    os.chdir(os.path.join(storage_path, ip))
    loggrabber_path = homing.home_join(
        'installed/col/apps/opsec_fetcher/utils')
    loggrabber = os.path.join(loggrabber_path, 'fw1-loggrabber')
    loggrabber_conf = os.path.join(loggrabber_path, 'fw1-loggrabber.conf')
    lea_conf_file = get_config_file_path(ip, opsec_sic_name,
                                         lea_server_opsec_entity_sic_name)
    proc = subprocess.Popen([loggrabber, '-l', lea_conf_file, \
                                         '-c', loggrabber_conf, \
                                         '--debug-level', '1'], \
                             stdout=subprocess.PIPE, \
                             stderr=subprocess.PIPE)

    #proc.stdout is always a pipe object here, so check for actual output
    #instead of testing the truthiness of the file object itself
    first_line = proc.stdout.readline()
    if first_line:
        return {
            'success': True,
            'ip': ip,
            'msg': 'Logs successfully retrieved'
        }
    else:
        while True:
            line = proc.stderr.readline()
            if not line:
                break

            error, err_msg = get_error(line)
            if error:
                logging.warn('Test Failed with error %s' % err_msg)
                return {'success': False, 'ip': ip, 'msg': err_msg}

        logging.warn('Test Failed. No Output')
        return {'success': False, 'ip': ip, 'msg': 'No Output'}
Code example #8
import time
import cPickle
from dateutil import parser as date_parser
from gevent.server import StreamServer
from gevent import socket
from xml.dom import minidom

from libcol.parsers import GetParser, InvalidParserException
from libcol import config_reader
from pylib import logger, homing, inet, msgfilling, textual, mongo

log = logger.getLogger(__name__)

certificate_file_path = homing.home_join(
    'etc/remote_connection/certificates/ssl.crt')
key_file_path = homing.home_join('etc/remote_connection/certificates/ssl.key')

present_field_names = [
    "Applicaion", "ClientAddress", "DestAddress", "DestPort", "EventID",
    "EventRecordID", "EventSourceName", "HandleId", "LogonType", "ObjectName",
    "Process", "ProcessID", "SourceAddress", "SourcePort", "TargetDomain",
    "TargetUserName", "UserName", "IpAddress", "IpPort", "NewProcessName",
    "ObjectName", "PackageName", "ProcessId", "ProcessName", "ServiceName",
    "Status", "SubjectDomainName", "SubjectLogonId", "SubjectUserName",
    "TargetDomainName", "TargetLogonId", "TargetUserName",
    "TokenElevationType", "WorkstationName"
]

field_names_renamed = [
    "application", "source_address", "destination_address", "destination_port",
    "event_id", "record_number", "event_category", "handle_id", "logon_type",
Code example #9
import os
import time
import logging
import shelve

from pylib import msgfilling, homing, disk, cipher
from libcol.parsers import GetParser, InvalidParserException

#Note to self: try using object composition instead of inheritance

COL_STORAGE_PATH = homing.home_join("storage/col/")
MEMORY_FILE = "memory.mem"


class Fetcher(object):
    """
    This is an interface from which all the fetchers will be written.
    For each sid in the config, a new instance of this class gets instanciated.
    So the device_ip, device_name etc.
        fields for the given client_map are directly the class members
        of the instance of this class
    """
    def __init__(self, **args):
        """
        args contains sid, client_map and the fetcher_runner instance.
        """
        self.__log_counter = 0
        self.__last_col_ts = int(time.time())
Code example #10
import subprocess
import os
import logging

from pylib import homing, disk
from configuration import generate_certificate, is_certificate_present
storage_path = homing.home_join('storage/col/opsec_fetcher/')


def get_config_file_path(ip, opsec_sic_name, lea_server_opsec_entity_sic_name):
    #make a new lea.conf file and return its path
    config_path = homing.home_join('storage/col/opsec_fetcher/%s/' % ip)
    lea_conf_path = os.path.join(config_path, 'lea.conf')
    opsecp12_path = os.path.join(config_path, 'opsec.p12')
    disk.prepare_path(config_path)
    with open(lea_conf_path, 'w') as conf_file:
        conf_file.write("lea_server auth_type ssl_opsec\n")
        conf_file.write("lea_server ip %s\n" % ip)
        conf_file.write("lea_server auth_port 18184\n")
        conf_file.write("opsec_sic_name %s\n" % opsec_sic_name)
        conf_file.write("opsec_sslca_file %s\n" % opsecp12_path)
        conf_file.write("lea_server opsec_entity_sic_name %s" %
                        lea_server_opsec_entity_sic_name)

    return lea_conf_path


def get_error(line):
    error = ''
    if line.rfind('ERROR:') != -1:
        error = line
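
For orientation, a hedged usage sketch of get_config_file_path above: the IP address and SIC names are placeholders, and <home> stands for whatever homing.home_join resolves to on the installation.

#Placeholder values for illustration only.
conf_path = get_config_file_path(
    '192.0.2.10',
    'CN=loginspect_lea,O=mgmt..aabbcc',
    'CN=cp_mgmt,O=mgmt..aabbcc')

#conf_path is <home>/storage/col/opsec_fetcher/192.0.2.10/lea.conf and,
#given the writes above, it contains roughly:
#   lea_server auth_type ssl_opsec
#   lea_server ip 192.0.2.10
#   lea_server auth_port 18184
#   opsec_sic_name CN=loginspect_lea,O=mgmt..aabbcc
#   opsec_sslca_file <home>/storage/col/opsec_fetcher/192.0.2.10/opsec.p12
#   lea_server opsec_entity_sic_name CN=cp_mgmt,O=mgmt..aabbcc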
Code example #11
import os
import re
import logging

from pylib.make_greenletsafe_fabric import patch; patch()
from pylib.ipv6_fabric import patch; patch()

from fabric.api import env, get, hide
from fabric.sftp import SFTP

from pylib import homing

private_keyfile = homing.home_join("storage/col/scp_fetcher/ssh/id_rsa")

def setup(ip, port, user, password=None):
    env.linewise = True
    env.abort_on_prompts = True
    env.no_keys = True

    env.host_string = "%s@%s:%s" % (user, ip, port)

    if password:
        env.key_filename = None
        env.password = password
    else:
        env.key_filename = private_keyfile
        env.password = None

def _check_regex_match(pattern, file_path):
    if not pattern:
Code example #12
import os
import glob
import logging
import subprocess

from pylib import homing, disk

STORAGE_PATH = homing.home_join("storage/col/opsec_fetcher/")
PULLERS_PATH = homing.home_join(
    'installed/col/lib/libcol/collectors/opsec_fetcher/')


class OpsecCommunicator(object):
    def __init__(self, ip, object_name, sic_pwd, putkey_pwd):
        self.ip = ip
        self.object_name = object_name
        self.sic_pwd = sic_pwd
        self.putkey_pwd = putkey_pwd

        self.cert_path = os.path.join(STORAGE_PATH, ip) + "/"
        self.opsec_pull_cert = os.path.join(PULLERS_PATH, "opsec_pull_cert")
        self.opsec_putkey = os.path.join(PULLERS_PATH, "opsec_putkey")

        #make new dir for this ip to store its certificates
        disk.prepare_path(self.cert_path)
        #cd to the newly created dir to perform certificate actions
        os.chdir(self.cert_path)

    def is_certificate_present(self):
        """
        Return True if certificate files are present
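
A minimal usage sketch of OpsecCommunicator, assuming placeholder credentials; the snippet is truncated here, so only the constructor and the presence check are exercised, and the certificate pull via opsec_pull_cert / opsec_putkey lives in methods not shown above.

#Placeholder values; the object name and passwords come from the
#Check Point SmartCenter side.
comm = OpsecCommunicator('192.0.2.10', 'loginspect_opsec',
                         'sic_one_time_pwd', 'putkey_pwd')
if not comm.is_certificate_present():
    #a real caller would trigger the certificate pulling workflow here
    pass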
Code example #13
def _handle_message_request(sock, addr, config, fi_out, db):
    global LAST_COL_TS
    global LOG_COUNTER

    log.debug("tcp collector; %s connected;" % str(addr))
    try:
        client_map = config["client_map"]
        client_ip = inet.get_ip(addr)
        config_ip = config_reader.get_config_ip(client_ip, config)

        sid, parser = _get_sid_parser(client_ip, config, config_ip)
        if not parser:
            return

        device_name = config["client_map"][config_ip]["device_name"]
        while True:
            data = sock.recv(4096)
            if not data:
                break

            try:
                message = cPickle.loads(zlib.decompress(data))
            except:
                #in case the complete data has not been received yet
                try:
                    data += sock.recv(4096)
                    message = cPickle.loads(zlib.decompress(data))
                except:
                    log.warn("Dropping the log; log is more than 4 KB")
                    sock.send(zlib.compress(cPickle.dumps({"received":
                                                           False})))
                    continue

            if message.get("send_app_file"):
                app_name = message["app_name"]
                app_content = open(
                    homing.home_join("storage/col/logpointagent/%s.fi" %
                                     app_name), "rb").read()
                sock.send(str(len(app_content)) + "\n" + app_content)
                log.warn("Application file for %s sent to client %s" %
                         (app_name, client_ip))
                continue

            if message.get("heartbeat_request"):
                client_id = message["client_id"]
                db_fi_client = db.fileinspectclients.find_one(
                    {"ip": client_ip})
                if not db_fi_client:
                    log.warn(
                        "Received first request from LogPoint agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    db.fileinspectclients.insert(
                        {
                            "ip": client_ip,
                            "client_id": client_id,
                            "config_changed": True
                        },
                        safe=True)
                    sock.send(
                        zlib.compress(
                            cPickle.dumps({
                                "type":
                                1,
                                "message":
                                "No applications added for this LogPoint Agent in LogPoint",
                                "pdict_using_apps": ["file_system_collector"]
                            })))
                elif db_fi_client and not db_fi_client.get("applications"):
                    log.warn(
                        "Add applciations for LogPoint Agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    sock.send(
                        zlib.compress(
                            cPickle.dumps({
                                "type":
                                1,
                                "message":
                                "No applications added for this LogPoint Agent in LogPoint",
                                "pdict_using_apps": ["file_system_collector"]
                            })))
                elif db_fi_client.get("applications") and (
                        message.get("first_fetch")
                        or db_fi_client["config_changed"]):
                    log.warn(
                        "Received config request from LogPoint agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    client_config = _get_client_config(
                        db_fi_client["applications"])
                    if not client_config.get("apps"):
                        sock.send(
                            zlib.compress(
                                cPickle.dumps({
                                    "type":
                                    1,
                                    "message":
                                    "No applications added for this LogPoint Agent in LogPoint",
                                    "pdict_using_apps":
                                    ["file_system_collector"]
                                })))
                    else:
                        sock.send(
                            zlib.compress(
                                cPickle.dumps({
                                    "type": 2,
                                    "config": client_config
                                })))
                        db.fileinspectclients.update({"ip": client_ip}, {
                            "$set": {
                                "client_id": client_id,
                                "config_changed": False
                            }
                        })
                else:
                    log.warn(
                        "Received heartbeat request from LogPoint agent with ip=%s and id=%s"
                        % (client_ip, client_id))
                    sock.send(zlib.compress(cPickle.dumps({"type": 0})))
                continue

            client_id = message['id']

            if message.get('message') and message.get('app_name'):
                app_name = message['app_name']

                extra_info = message.get('extra_info') or {}

                fi_out.start_benchmarker_processing()

                if app_name == "windows_eventlog_reader":
                    event = {
                        "msg": textual.utf8(message["message"]),
                        "_type_str": "msg"
                    }
                    if extra_info.get("_is_event_xml"):
                        extra_info.pop("_is_event_xml")
                        #try:
                        #    more_info = _get_extra_key_values_from_xml(message["message"])
                        #except:
                        #    more_info = {}
                        #    log.warn("Couldnot parse windows xml event log sent from LogPoint Agent")
                        #if more_info:
                        #    extra_info.update(more_info)
                    parser_data = [event]
                else:
                    parser.write(textual.utf8(message['message']),
                                 old_parser=True)
                    parser_data = []
                    if parser:
                        for event in parser:
                            if event:
                                parser_data.append(event)

                for event in parser_data:
                    col_ts = int(time.time())
                    if col_ts > LAST_COL_TS:
                        LAST_COL_TS = col_ts
                        LOG_COUNTER = 0

                    mid_prefix = '%s|%s|%s|%s|' % (config['loginspect_name'],
                                                   config['col_type'],
                                                   config_ip, col_ts)

                    LOG_COUNTER += 1
                    event['mid'] = mid_prefix + "%d" % LOG_COUNTER
                    event['device_name'] = device_name
                    event['device_ip'] = client_ip
                    event['collected_at'] = config['loginspect_name']
                    event['col_ts'] = col_ts
                    event['_counter'] = LOG_COUNTER
                    event['col_type'] = config['col_type']

                    msgfilling.add_types(event, '_type_str', 'device_name')
                    msgfilling.add_types(event, '_type_ip', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'device_ip')
                    msgfilling.add_types(event, '_type_str', 'collected_at')
                    msgfilling.add_types(event, '_type_num', 'col_ts')
                    msgfilling.add_types(event, '_type_str', 'col_type')

                    event['_normalized_fields'] = {}
                    event['_normalized_fields']['app_name'] = message[
                        'app_name']
                    event['_normalized_fields']['lp_agent_id'] = client_id

                    msgfilling.add_types(event, '_type_str', 'app_name')
                    msgfilling.add_types(event, '_type_str', 'lp_agent_id')

                    if extra_info:
                        #event.update(extra_info)
                        for key, value in extra_info.iteritems():
                            if type(value) is int:
                                msgfilling.add_types(event, '_type_num', key)
                            else:
                                msgfilling.add_types(event, '_type_str', key)
                            event['_normalized_fields'][key] = value

                    log.debug('sending message to normalizer: %s' % event)

                    event['repo'] = config['client_map'][config_ip]['repo']
                    event['normalizer'] = config['client_map'][config_ip][
                        'normalizer']

                    fi_out.send_with_mid(event)

                sock.send(zlib.compress(cPickle.dumps({'received': True})))
            else:
                sock.send(zlib.compress(cPickle.dumps({'received': False})))
    except Exception, e:
        log.warn('logpoint agent collector exception: %s' % str(e))
Code example #14
import os
import shutil
import subprocess
import logging
from pylib import homing, disk

storage_path = homing.home_join('storage/col/opsec_fetcher/')
certificate_path = homing.home_join(
    'installed/col/lib/libcol/collectors/opsec_fetcher/')


def remove_certificates(ip):
    try:
        shutil.rmtree(os.path.join(storage_path, ip))
    except OSError, err:
        logging.warn(err)


def remove_certificate_if_exists(ip):
    #if os.path.exists(os.path.join(storage_path, ip, 'opsec.p12')) or \
    #        os.path.exists(os.path.join(storage_path, ip, 'sslauthkeys.C')) or \
    #            os.path.exists(os.path.join(storage_path, ip, 'sslsess.C')):
    if not is_certificate_present(ip):
        logging.warn(
            'Partial certificate info exists. Removing the incomplete info '
            'and generating a new certificate. Please use the fw_put_key '
            'command on the server and RESET the SIC communication as well')
        remove_certificates(ip)


def is_certificate_present(ip):
Code example #15
import os
import re
import base64
import json
import logging as log
from jinja2 import Template, TemplateSyntaxError
from flask import Markup
from pylib import disk, homing, textual
from lib import dboperation
from views.contrib.mongoid import is_mongokit_objectid
from pluggables.modules.Notification.controller import Notification

ALERT_TEMPLATES_PATH = homing.home_join('storage/alerttemplates/')
ALERT_SSH_CERTIFICATES_PATH = homing.home_join('storage/alertssh/')


class NewTestEmailNotification(Notification):
    def __init__(self, classname, source_address, params, module=None):
        Notification.__init__(self, classname, source_address, params, module)

    def _regex_replacer(self, matchobj):
        command = matchobj.group(1)
        if command == 'readable':
            parameter = 'type, format, timezone, mapped_aliases'
        else:
            parameter = 'format, timezone'

        return '| %s(%s)}}' % (command, parameter)

    def pre_extract(self):
        #Collection is 'Alert' by default. Make it dynamic later
Code example #16
from LineParser import LineParser
from SyslogParser import SyslogParser
from WmiParser import WmiParser
from StackTraceParser import StackTraceParser
from RegexParser import RegexParser
from SnareParser import SnareParser

import os
import glob

from pylib import homing

PARSERS_PATH = homing.home_join("installed/col/lib/libcol/parsers/")


def get_plug_parsers_list():
    """Returns the list of ConfigGen files
    """
    parsers = glob.glob(os.path.join(PARSERS_PATH, "*.py"))

    plug_parsers = []
    for parser in parsers:
        pg = os.path.basename(parser)
        if pg not in ["__init__.py", "LineParser.py", "SyslogParser.py", "WmiParser.py", "StackTraceParser.py",
                      "RegexParser.py", "SnareParser.py", "LIv4Parser.py", "LIv4SNMPParser.py", "NewSyslogParser.py"]:
            #str.strip(".py") would strip characters, not the suffix
            plug_parsers.append(pg[:-len(".py")])
    return plug_parsers


class InvalidParserException(Exception):