Code example #1
def datasets(request):
    datasets = Dataset.objects.all().only('id', 'name', 'app', 'nb_logs',
                                          'built', 'error', 'svm_built')
    repo = MongoDBRepository.objects.get(repo_name='Vulture Internal Database')
    client = MongoDBClient(repo)
    con = client._get_connection()
    db_logs = con.logs
    cols = db_logs.collection_names()
    datasets_learning = []

    for col in cols:
        if not col.startswith('learning_'):
            continue

        try:
            # Strip the "learning_" prefix (9 chars) to recover the application id
            app = Application.objects.only('name').with_id(ObjectId(col[9:]))
            uris = get_dataset_learning("learning_" + str(app.id), length=True)
            dataset = {
                "name": col,
                "phony": app.name + " Defender Whitelist",
                "size": uris
            }
            datasets_learning.append(dataset)
        except Exception:
            continue

    return render_to_response('datasets.html', {
        'datasets': datasets,
        'datasets_learning': datasets_learning
    }, context_instance=RequestContext(request))
Code example #2
    def _fetch_data_from_mongo(self):
        mongo = MongoDBClient(self.repo)
        result = mongo.search(self.params)
        self.dataset.nb_logs = 0

        for row in result['data']:
            logs = Logs()
            logs = self._full_object(logs, row)
            logs.type_logs = self.params['type_logs']
            self.dataset.logs.append(logs)
            self.dataset.nb_logs += 1

        try:
            self.dataset.built = True
            logger.info("Dataset built")
            self.dataset.save()
            logger.info("Dataset saved")
        except Exception as e:
            logger.error("Failed to build dataset:")
            logger.exception(e)
            self.dataset.built = False
            self.dataset.logs = []
            self.dataset.nb_logs = 0
            self.dataset.error = "Can't build dataset, too many logs"
            self.dataset.save()
Code example #3
    def __init__(self):
        cluster = Cluster.objects.get()

        # Pick the first internal data repository; the loop variable
        # deliberately leaks out of the for block.
        for logs_repository in BaseAbstractRepository.get_data_repositories():
            if logs_repository.is_internal:
                break

        self.log_rotate = cluster.system_settings.global_settings.log_rotate
        self.client = MongoDBClient(logs_repository)
Code example #4
def get_collection(collection_name):
    """ Connect to MongoDB with MongoClient on internal database
         & return a pymongo.collection.Collection instance
    :param   collection_name  The collection name to connect to
    :return  A pymongo.collection.Collection object 
             or raise pymongo.errors.InvalidName if invalid name
    """
    repo = MongoDBRepository.objects.get(repo_name='Vulture Internal Database')
    client = MongoDBClient(repo)
    con = client._get_connection()
    db_logs = con.logs
    return db_logs[collection_name]
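
A minimal usage sketch of get_collection(); the collection name below is a hypothetical example, and it assumes the internal repository is reachable:

# Hypothetical usage: fetch one document from a learning collection.
collection = get_collection('learning_5a1b2c3d4e5f6a7b8c9d0e1f')
doc = collection.find_one()
print(doc)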
Code example #5
    def __init__(self, type_logs):
        """
        Open the specified file and use it as the stream for logging.
        """
        # keep the absolute path, otherwise derived classes which use this
        # may come a cropper when the current directory changes
        self._name = "Database Handler"
        self.filters = []
        self.lock = None

        cluster = Cluster.objects.get()
        try:
            logs_repository = cluster.system_settings.global_settings.repository
            if not logs_repository:
                # Fall back to the first internal data repository
                for logs_repository in BaseAbstractRepository.get_data_repositories():
                    if logs_repository.is_internal:
                        break
        except Exception:
            for logs_repository in BaseAbstractRepository.get_data_repositories():
                if logs_repository.is_internal:
                    break

        if logs_repository.type_uri == 'mongodb':
            self.client = MongoDBClient(logs_repository)
        elif logs_repository.type_uri == 'elasticsearch':
            self.client = ElasticSearchClient(logs_repository)

        self.type_logs = type_logs
Code example #6
def update_learning_collections():
    """Remove duplicates in the learning_{id} collections of the logs MongoDB
    database.
    """
    repo = MongoDBRepository.objects.get(repo_name="Vulture Internal Database")

    logger.info("Connecting to MongoDB")

    client = MongoDBClient(repo)
    con = client._get_connection()
    db_logs = con.logs

    try:
        logger.debug("Creating new collections")
        create_new_collections(db_logs)
    except Exception as e:
        logger.error("Something went wrong: some collections weren't processed; "
                     "rolling back the new collections:")
        logger.exception(e)
        delete_new_learning_collections(db_logs)
        raise UpdateCrash("Failed to create new collections")
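
For context, a minimal sketch of one way to drop duplicates from a learning collection; dedup_collection is a hypothetical helper (not the create_new_collections used above) and the 'uri' key is an assumed dedup field:

def dedup_collection(db_logs, col_name, key='uri'):
    """Keep one document per distinct `key` value (illustrative only)."""
    col = db_logs[col_name]
    seen = set()
    for doc in col.find():
        value = doc.get(key)
        if value in seen:
            col.delete_one({'_id': doc['_id']})  # drop the duplicate
        else:
            seen.add(value)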
Code example #7
def get_rules_wl_bl(request):
    app_id = request.POST['app_id']
    log_id = request.POST['log_id']
    rules, values = [], []

    app = Application.objects.with_id(ObjectId(app_id))
    repo = app.log_custom.repository

    if repo.type_uri == 'mongodb':
        params = {'type_logs': request.POST['type_logs']}
        mongo_client = MongoDBClient(repo)
        result = mongo_client.search(params, id_query=log_id)['data']

    elif repo.type_uri == 'elasticsearch':
        elastic_client = ElasticSearchClient(repo)
        result = elastic_client.search(id_query=log_id)
        result = result['data']['hits']['hits'][0]['_source']

    temp_values = result.values()

    for val in temp_values:
        if isinstance(val, dict):
            values.extend(val.values())
        elif isinstance(val, list):
            values.extend(val)
        else:
            values.append(val)

    for val in values:
        if val == "":
            continue

        try:
            for rule in ModSecRules.objects.filter(rs=app.wl_bl_rules,
                                                   rule_content__contains=val):
                rules.append("{}|{}".format(rule.id, rule.rule_content))
        except Exception:
            pass

    return JsonResponse({'rules': json.dumps(list(set(rules)))})
Code example #8
class LogRotate:
    def __init__(self):
        cluster = Cluster.objects.get()

        for logs_repository in BaseAbstractRepository.get_data_repositories():
            if logs_repository.is_internal:
                break

        self.log_rotate = cluster.system_settings.global_settings.log_rotate
        self.client = MongoDBClient(logs_repository)

    def delete_logs(self):
        if self.log_rotate == 0:
            return True

        # Delete log entries older than the configured rotation window
        lastDate = (datetime.datetime.now() -
                    datetime.timedelta(days=self.log_rotate - 1))
        self.client.delete_logs(lastDate)

        # Monitor entries are kept 30 days; filter() is required here,
        # objects.get() would raise with more than one matching document
        lastDate = (datetime.datetime.now() - datetime.timedelta(days=30))
        for m in Monitor.objects.filter(time__lt=lastDate):
            m.delete()
        return True
Code example #9
File: monitor.py Project: underscoredje/vulture3-gui
def traffic(request):
    apps = []
    for app in Application.objects():
        if app.log_custom.repository_type == 'data':
            apps.append(app)

    if not request.is_ajax():
        cluster = Cluster.objects.get()

        loganalyser_settings = cluster.system_settings.loganalyser_settings
        rules = loganalyser_settings.loganalyser_rules

        tags = []
        for rule in rules:
            tags.extend(rule.tags.split(','))

        return render_to_response('monitor_traffic.html',
                                  {'apps': apps, 'tags': set(tags)},
                                  context_instance=RequestContext(request))

    codes   = json.loads(request.POST['codes'])
    apps_id = json.loads(request.POST['apps_id'])
    tags    = json.loads(request.POST['tags'])
    if apps_id is not None:
        repos = {}
        for app_id in apps_id:
            app = Application.objects.with_id(ObjectId(app_id))
            # Group applications by their log repository
            repos.setdefault(app.log_custom.repository, []).append(app)
    else:
        repos = {}
        for app in apps:
            repos.setdefault(app.log_custom.repository, []).append(app)

    now    = datetime.datetime.utcnow()
    before = now - datetime.timedelta(minutes=10)

    params = {
        'codes'    : codes,
        'tags'     : tags,
        'startDate': before,
        'endDate'  : now
    }

    results, max_n = {}, 0
    for repo, apps in repos.items():
        params['apps'] = apps

        if repo.type_uri == 'mongodb':
            client = MongoDBClient(repo)

        elif repo.type_uri == 'elasticsearch':
            client = ElasticSearchClient(repo)

        for key, value in client.map(params).items():
            try:
                results[key] += value
            except KeyError:
                results[key] = value
                
            if value > max_n:
                max_n = value

    return JsonResponse({
        'results': results,
        'max'    : max_n
    })
Code example #10
import os
import sys

sys.path.append('/home/vlt-gui/vulture')
os.environ.setdefault("DJANGO_SETTINGS_MODULE", 'vulture.settings')

import django
django.setup()

from gui.models.monitor_settings import Monitor
from gui.models.repository_settings import MongoDBRepository
from vulture_toolkit.log.mongodb_client import MongoDBClient

if __name__ == '__main__':

    try:
        repo = MongoDBRepository.objects.get(
            repo_name='Vulture Internal Database')
        client = MongoDBClient(repo)
        con = client._get_connection()
        if not con.is_primary:
            print("Current node is not the MongoDB primary, quitting.")
            sys.exit(0)

        # Ensure index exists on Monitor collection (vulture database)
        Monitor.ensure_indexes()
        # And create index on access collection (logs database)
        db_logs = con.logs
        db_logs['access'].create_index([("app_name", 1), ("time", 1)])

        print("Indexes creation done.")
    except Exception as e:
        # The original snippet is truncated here; the format argument
        # (likely the node name) is lost, so the exception is shown instead.
        print("Failed to connect to MongoDB on node {}. Aborting index creation."
              .format(e))
        sys.exit(1)  # non-zero exit on failure (assumption)
Code example #11
def report_data(request):

    daterange = json.loads(request.POST['daterange'])
    params = {
        'startDate': daterange['startDate'],
        'endDate': daterange['endDate'],
        'reporting_type': request.POST['reporting_type']
    }

    errors = []

    if request.POST['reporting_type'] in ('access', 'security'):
        apps = []
        apps_id = json.loads(request.POST['apps'])
        if apps_id is not None:
            repos = {}
            for app_id in apps_id:
                app = Application.objects.with_id(ObjectId(app_id))
                # Group applications by their log repository
                repos.setdefault(app.log_custom.repository, []).append(app)
        else:
            for app in Application.objects():
                if app.log_custom.repository_type == 'data':
                    apps.append(app)

            repos = {}
            for app in apps:
                repos.setdefault(app.log_custom.repository, []).append(app)

        params['type_logs'] = 'access'

        results = {}
        for repo, apps in repos.items():
            params['apps'] = apps
            try:
                if repo.type_uri == 'mongodb':
                    client = MongoDBClient(repo)

                elif repo.type_uri == 'elasticsearch':
                    client = ElasticSearchClient(repo)

                aggregation = client.aggregate(params)
                if results:
                    results = client.merge_aggregations(aggregation, results)
                else:
                    results = aggregation
            except ClientLogException as e:
                errors.append(str(e))

        results = client.fill_data(results)

    elif request.POST['reporting_type'] == 'packet_filter':
        node_id = request.POST['node']
        results = {}

        node = Node.objects.with_id(ObjectId(node_id))
        repo = node.system_settings.pf_settings.repository

        params['type_logs'] = 'packet_filter'
        params['node'] = node.name
        try:
            if repo.type_uri == 'mongodb':
                client = MongoDBClient(repo)
                results = client.aggregate(params)

            elif repo.type_uri == 'elasticsearch':
                client = ElasticSearchClient(repo)
                results = client.aggregate(params)

        except ClientLogException as e:
            errors.append(str(e))

        results = client.fill_data(results)

    return JsonResponse({'results': results, 'errors': errors})
Code example #12
File: logs.py Project: underscoredje/vulture3-gui
def get_logs(request):
    """ Get logs from databases.
    Handle MongoDB and ElasticSearch

    """
    cluster = Cluster.objects.get()

    params = {
        'type_logs'   : request.POST['type_logs'],
        'start'       : int(request.POST['iDisplayStart']),
        'length'      : int(request.POST['iDisplayLength']),
        'sorting'     : request.POST['sColumns'].split(',')[int(request.POST['iSortCol_0'])],
        'type_sorting': request.POST['sSortDir_0'],
        'columns'     : request.POST['columns'],
        'dataset'     : False,
        'filter'      : {
            'startDate': request.POST["startDate"],
            'endDate'  : request.POST["endDate"],
        }
    }

    if request.POST['type_data'] == 'waf':
        app_id  = request.POST['app_id']

        ## Fetch the application
        app  = Application.objects.with_id(ObjectId(app_id))
        repo = app.log_custom.repository
        params['filter']['rules'] = json.loads(request.POST['rules'])
        params['filter']['app']   = {
            'name'        : str(app.name).replace(' ', '_'),
            'public_dir'  : app.public_dir,
            'public_name' : app.public_name,
            'public_alias': app.public_alias
        }

    elif request.POST['type_data'] == 'packet_filter':
        node_id = request.POST['node']
        result = {
            'max': 0,
            'data': []
        }

        node = Node.objects.with_id(ObjectId(node_id))
        repo = node.system_settings.pf_settings.repository

        params['filter']['node'] = node.name
        try:
            params['filter']['rules'] = json.loads(request.POST[repo.type_uri])
        except KeyError:
            params['filter']['rules'] = json.loads(request.POST['rules'])

    elif request.POST['type_data'] in ('vulture', 'diagnostic'):
        params['filter']['rules'] = json.loads(request.POST['rules'])
        repo = cluster.system_settings.global_settings.repository

    try:
        if repo.type_uri == 'mongodb':
            mongo_client = MongoDBClient(repo)
            result = mongo_client.search(params)

        elif repo.type_uri == 'elasticsearch':
            elastic_client = ElasticSearchClient(repo)
            result = elastic_client.search(params)

    except ClientLogException as e:
        result = "Error:\n" + str(e)
        return JsonResponse({
            "iTotalRecords"       : 0,
            "iTotalDisplayRecords": 0,
            "aaData"              : result
        })
    except Exception as e:
        result = "Error:\nAn error occured while fetching logs"
        return JsonResponse({
            "iTotalRecords"       : 0,
            "iTotalDisplayRecords": 0,
            "aaData"              : result
        })

    data = []
    for res in result['data']:
        ## Stringify values so the row can be JSON-serialised
        temp = {}
        for key, value in res.items():
            temp['info'] = "<i class='fa fa-chevron-circle-right'></i><i style='display:none;' class='fa fa-chevron-circle-down'></i>"

            if key == 'requested_uri':
                temp[key] = str(value)[0:100]
                temp['requested_uri_full'] = str(value)
            elif key == 'time' and repo.type_uri == 'mongodb':
                temp[key] = value.replace(tzinfo=pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S%z")
            elif key == 'info':
                temp['info_pf'] = str(value)
            else:
                temp[key] = str(value)

        data.append(temp)

    return JsonResponse({
        "iTotalRecords"       : result['max'],
        "iTotalDisplayRecords": result['max'],
        "aaData"              : data
    })
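
For reference, an illustrative POST payload for get_logs(); the keys mirror the request.POST reads above, while the values (app id, dates, columns) are made up:

payload = {
    'type_logs'     : 'access',
    'type_data'     : 'waf',
    'app_id'        : '5a1b2c3d4e5f6a7b8c9d0e1f',  # hypothetical ObjectId
    'iDisplayStart' : '0',
    'iDisplayLength': '25',
    'sColumns'      : 'time,src_ip,requested_uri',
    'iSortCol_0'    : '0',
    'sSortDir_0'    : 'desc',
    'columns'       : 'time,src_ip,requested_uri',
    'rules'         : '{}',
    'startDate'     : '2018-01-01T00:00:00Z',
    'endDate'       : '2018-01-02T00:00:00Z',
}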
Code example #13
File: logs.py Project: underscoredje/vulture3-gui
def export_logs(request):
    """ Get logs from database with a search query
    Return a csv formatted file
    """

    if request.method == 'POST':
        cluster   = Cluster.objects.get()

        date = json.loads(request.POST['date'])
        params = {
            'start'       : None,
            'length'      : None,
            'sorting'     : None,
            'type_sorting': None,
            'dataset'     : False,
            'type_logs'   : request.POST['type_logs'],
            'filter'      : {
                'startDate': date['startDate'],
                'endDate'  : date["endDate"],
            }
        }


        if request.POST['type_data'] == 'waf':
            app_id  = request.POST['app_id']

            ## Fetch the application
            app  = Application.objects.with_id(ObjectId(app_id))
            repo = app.log_custom.repository
            params['filter']['rules'] = json.loads(request.POST['rules'])
            params['filter']['app']   = {
                'name'        : str(app.name).replace(' ', '_'),
                'public_dir'  : app.public_dir,
                'public_name' : app.public_name,
                'public_alias': app.public_alias
            }

        elif request.POST['type_data'] == 'packet_filter':
            node_id = request.POST['node']
            result = {
                'max': 0,
                'data': []
            }

            node = Node.objects.with_id(ObjectId(node_id))
            repo = node.system_settings.pf_settings.repository
            params['filter']['node'] = node.name
            try:
                params['filter']['rules'] = json.loads(request.POST[repo.type_uri])
            except KeyError:
                params['filter']['rules'] = json.loads(request.POST['rules'])

        elif request.POST['type_data'] == 'vulture':
            repo = cluster.system_settings.global_settings.repository
            params['filter']['rules'] = json.loads(request.POST['rules'])

        try:

            if repo.type_uri == 'mongodb':
                mongo_client = MongoDBClient(repo)
                result = mongo_client.search(params)

            elif repo.type_uri == 'elasticsearch':
                elastic_client = ElasticSearchClient(repo)
                result = elastic_client.search(params)

            with open('/tmp/logs.csv', 'w') as csvfile:
                writer = csv.DictWriter(csvfile, result['data'][0].keys())
                for row in result['data']:
                    if '@timestamp' in row:
                        row.pop('@timestamp')

                    if repo.type_uri == 'mongodb':
                        row['time'] = row['time'].replace(tzinfo=pytz.UTC).strftime("%Y-%m-%dT%H:%M:%S%z")

                    writer.writerow(row)

            return JsonResponse({'status': True})
        except IndexError:
            return JsonResponse({
                "status"              : False,
                "reason"              : "Index Error:\n search results are empty"
            })
        except ClientLogException as e:
            return JsonResponse({
                "status"              : False,
                "reason"              : "Error:\n" + str(e)
            })
        except Exception:
            return JsonResponse({
                "status"              : False,
                "reason"              : "Error:\nAn error occured while exporting logs"
            })
    elif request.method == 'GET':
        wrapper      = FileWrapper(open('/tmp/logs.csv'))
        content_type = mimetypes.guess_type('/tmp/logs.csv')[0]
        response     = HttpResponse(wrapper, content_type=content_type)
        response['Content-Length']      = os.path.getsize('/tmp/logs.csv')
        response['Content-Disposition'] = "attachment; filename=logs.csv"
        return response
Code example #14
def graph_realtime(request):
    """Run the selected anomaly-detection algorithm on recent logs.
    :param request: POST carrying 'dataset_id' and 'algo_used'
    :return: JsonResponse with the feature vectors and anomaly flags
    """
    dataset_id = request.POST['dataset_id']
    dataset = Dataset.objects.only('application', 'uri',
                                   'uri2').with_id(ObjectId(dataset_id))
    algo_used = request.POST['algo_used']
    app = dataset.application
    repo = app.log_custom.repository

    if isinstance(repo, MongoDBRepository):
        client = MongoDBClient(repo)
    else:
        client = ElasticSearchClient(repo)

    now = datetime.now()
    if "Req_per_min_per" in algo_used:
        before = now - timedelta(seconds=60)
    else:
        before = now - timedelta(seconds=5)

    # columns = {
    #     'Levenstein': "_id,requested_uri,src_ip",
    #     'Levenstein2': "_id,requested_uri,src_ip",
    #     'HTTPcode_bytes_received': "_id,http_code,bytes_received,src_ip,requested_uri",
    #     "HTTPcode_bytes_sent": "_id,http_code,bytes_sent,src_ip,requested_uri",
    #     "Req_per_min_per_ip": "_id,src_ip,time",
    #     "Req_per_pin_per_user": "******",
    #     "Ratio": "_id,http_code,bytes_received,bytes_sent,requested_uri"
    # }

    params = {
        'dataset': True,
        'sorting': "time",
        'type_sorting': "desc",
        'type_logs': "access",
        'columns': algo_used,
        'start': 0,
        'length': 20,
        'filter': {
            'rules': {},
            'startDate': before.strftime('%Y-%m-%dT%H:%M:%SZ'),
            'endDate': now.strftime('%Y-%m-%dT%H:%M:%SZ'),
            'app': {
                'public_name': app.public_name,
                'public_dir': app.public_dir,
                'name': app.name,
                'public_alias': app.public_alias,
            }
        }
    }

    result = client.search(params, id_query=None)['data']
    liste_dist, AnomalyArray, data, info, liste_sympa = [], [], [], [], []

    if len(result):
        mysvm, temp1, temp2 = retrieve_SVM(ObjectId(dataset_id), algo_used)

        if algo_used == "Levenstein":
            uri = dataset.uri
            for i in result:
                for j in i['requested_uri'].split("/"):
                    if j != "" and j not in liste_sympa:
                        liste_sympa.append(j)
            for key in liste_sympa:
                liste_temp = list()
                for key2 in uri:
                    dist = leven.distance(str(key), str(key2))
                    liste_temp.append(dist)
                average = sum(liste_temp) / len(liste_temp)
                liste_dist.append([average, len(key)])
                info.append(key)
            data = liste_dist
            if len(liste_dist):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        elif algo_used == "Levenstein2":
            uri = dataset.uri2
            for i in result:
                if i['requested_uri'] != "/" \
                        and i['requested_uri'] not in liste_sympa:
                    liste_sympa.append(i['requested_uri'])
            for key in liste_sympa:
                liste_temp = list()
                for key2 in uri:
                    dist = leven.distance(str(key), str(key2))
                    liste_temp.append(dist)
                average = sum(liste_temp) / len(liste_temp)
                liste_dist.append([average, len(key)])
                info.append(key)
            data = liste_dist
            if len(liste_dist):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        elif algo_used == "HTTPcode_bytes_received":
            for i in result:
                info.append(i['src_ip'] + " " + i["requested_uri"])
                data.append([int(i["http_code"]), i["bytes_received"]])
            if len(data):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        elif algo_used == "HTTPcode_bytes_sent":

            for i in result:
                info.append(i['src_ip'] + " " + i["requested_uri"])
                data.append([int(i["http_code"]), i["bytes_sent"]])
            if len(data):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        elif algo_used == "Ratio":
            for i in result:
                data.append([
                    int(i["http_code"]),
                    # note: raises ZeroDivisionError if bytes_received is 0
                    int(i["bytes_sent"]) / int(i["bytes_received"])
                ])
                info.append(str(i["src_ip"]) + " " + str(i["requested_uri"]))
            if len(data):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        elif algo_used == "Req_per_min_per_ip":
            dico = dict()
            for i in result:
                date = datetime.strptime(
                    "1970:01:01 " + str(i['time']).split(' ')[1],
                    "%Y:%m:%d %H:%M:%S")

                date = time.mktime(date.timetuple())
                try:
                    dico[date][i['src_ip']] += 1
                except KeyError:
                    dico[date] = {i['src_ip']: 1}

            for timestamp, value in dico.items():
                # list(...) keeps this working on Python 3 dict views
                data.append([float(timestamp) / 1000, list(value.values())[0]])
                info.append(list(value.keys())[0])

            if len(data):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        elif algo_used == "Req_per_min_per_user":
            dico = dict()
            for i in result:
                date = datetime.strptime(
                    "1970:01:01 " + str(i['time']).split(' ')[1],
                    "%Y:%m:%d %H:%M:%S")

                date = time.mktime(date.timetuple())
                try:
                    dico[date][i['user']] += 1
                except KeyError:
                    dico[date] = {i['user']: 1}
            for timestamp, value in dico.items():
                data.append([float(timestamp) / 1000, list(value.values())[0]])
                info.append(list(value.keys())[0])

            if len(data):
                AnomalyArray = mysvm.predict(np.array(data)).tolist()

        for key, sub_data in enumerate(data):
            try:
                data[key].append(info[key])
            except Exception:
                continue

    return JsonResponse({"data": data, "anomaly": AnomalyArray})