Example #1
def _get_cached_signatures(signature_cache, query_hash):
    try:
        s = signature_cache.get(query_hash)
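        # A cache miss (None) or a cached empty payload is returned as-is;
        # both are falsy, so callers treat either as "no cached response".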
        if s is None or s == b'':
            return s
        return make_file_response(s,
                                  f"al_signatures_{query_hash[:7]}.zip",
                                  len(s),
                                  content_type="application/zip")
    except Exception:  # pylint: disable=W0703
        LOGGER.exception('Failed to read cached signatures:')

    return None
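
For reference, make_file_response is a helper imported from the surrounding UI module and is not shown in these examples. Below is a minimal Flask-based sketch of what a helper with this call shape could look like; the body is an assumption for illustration, not the project's actual implementation:

from flask import Response


def make_file_response(data, name, size, status_code=200,
                       content_type="application/octet-stream"):
    # Hypothetical stand-in: return the payload as a named file download.
    headers = {
        "Content-Disposition": f"attachment; filename={name}",
        "Content-Length": str(size),
    }
    return Response(data, status=status_code, content_type=content_type,
                    headers=headers)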
Example #2
def backup(**_):
    """
    Create a backup of the current system configuration

    Variables:
    None

    Arguments:
    None

    Data Block:
    None

    Result example:
    <SERVICE BACKUP>
    """
    services = {'type': 'backup', 'server': config.ui.fqdn, 'data': {}}

    for service in STORAGE.service_delta.stream_search("*:*",
                                                       fl="id",
                                                       as_obj=False):
        name = service['id']
        service_output = {
            'config': STORAGE.service_delta.get(name, as_obj=False),
            'versions': {}
        }
        for service_version in STORAGE.service.stream_search(f"name:{name}",
                                                             fl="id",
                                                             as_obj=False):
            version_id = service_version['id']
            service_output['versions'][version_id] = STORAGE.service.get(
                version_id, as_obj=False)

        services['data'][name] = service_output

    out = yaml.dump(services, indent=2)
    return make_file_response(out,
                              name=f"{config.ui.fqdn}_service_backup.yml",
                              size=len(out),
                              content_type="application/json")
Example #3
def download_signatures(**kwargs):
    """
    Download signatures from the system.

    Variables:
    None

    Arguments:
    query       => Query used to filter the signatures
                   Default: All deployed signatures

    Data Block:
    None

    Result example:
    <A zip file containing all signatures files from the different sources>
    """
    user = kwargs['user']
    query = request.args.get('query', 'status:DEPLOYED')

    access = user['access_control']
    last_modified = STORAGE.get_signature_last_modified()

    query_hash = sha256(
        f'{query}.{access}.{last_modified}'.encode('utf-8')).hexdigest()

    with forge.get_cachestore('al_ui.signature') as signature_cache:
        response = _get_cached_signatures(signature_cache, query_hash)
        if response:
            return response

        with Lock(f"al_signatures_{query_hash[:7]}.zip", 30):
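            # Re-check the cache: another request may have built and saved
            # the zip while this one was waiting on the lock.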
            response = _get_cached_signatures(signature_cache, query_hash)
            if response:
                return response

            output_files = {}

            keys = [
                k['id'] for k in STORAGE.signature.stream_search(
                    query, fl="id", access_control=access, as_obj=False)
            ]
            signature_list = sorted(STORAGE.signature.multiget(
                keys, as_dictionary=False, as_obj=False),
                                    key=lambda x: x['order'])

            for sig in signature_list:
                out_fname = f"{sig['type']}/{sig['source']}"
                output_files.setdefault(out_fname, [])
                output_files[out_fname].append(sig['data'])

            output_zip = InMemoryZip()
            for fname, data in output_files.items():
                output_zip.append(fname, "\n\n".join(data))

            rule_file_bin = output_zip.read()

            signature_cache.save(query_hash,
                                 rule_file_bin,
                                 ttl=DEFAULT_CACHE_TTL)

            return make_file_response(rule_file_bin,
                                      f"al_signatures_{query_hash[:7]}.zip",
                                      len(rule_file_bin),
                                      content_type="application/zip")
Example #4
def get_ontology_for_alert(alert_id, **kwargs):
    """
    WARNING:
        This API's output is considered stable, but the ontology model itself is still in its
        alpha state. Do not use the results of this API in a production system just yet.

    Get all ontology files for a given alert

    Variables:
    alert_id         => Alert ID to get ontology files for

    Arguments:
    sha256      => Only get ontology files for this file, multiple values allowed (optional)
    service     => Only get ontology files for this service, multiple values allowed (optional)

    Data Block:
    None

    Result example:      (File where each line is a result ontology record)
    {"header":{"md5":"5fa76...submitter":"admin"}}
    {"header":{"md5":"6c3af...submitter":"admin"}}
    {"header":{"md5":"c8e69...submitter":"admin"}}
    """
    user = kwargs['user']
    sha256s = request.args.getlist('sha256')
    services = request.args.getlist('service')

    # Get alert from ID
    alert = STORAGE.alert.get(alert_id, as_obj=False)
    if not alert:
        return make_api_response(
            "", f"There are not alert with this ID: {alert_id}", 404)
    if not Classification.is_accessible(user['classification'],
                                        alert['classification']):
        return make_api_response(
            "",
            f"Your are not allowed get ontology files for this alert: {alert_id}",
            403)

    # Get related submission
    submission = STORAGE.submission.get(alert['sid'], as_obj=False)
    if not submission:
        return make_api_response(
            "", f"The submission related to the alert is missing: {alert_id}",
            404)
    if not Classification.is_accessible(user['classification'],
                                        submission['classification']):
        return make_api_response(
            "",
            f"Your are not allowed get ontology files for the submission related to this alert: {alert_id}",
            403)

    # Get all the result keys, skipping empty results (keys ending in ".e")
    keys = [k for k in submission['results'] if not k.endswith(".e")]

    # Only use keys matching these SHA256s
    if sha256s:
        tmp_keys = []
        for sha256 in sha256s:
            tmp_keys.extend([k for k in keys if k.startswith(sha256)])
        keys = tmp_keys

    # Only use keys matching these services
    if services:
        tmp_keys = []
        for service in services:
            tmp_keys.extend([k for k in keys if f".{service}." in k])
        keys = tmp_keys

    # Pull the results for the keys
    try:
        results = STORAGE.result.multiget(keys,
                                          as_dictionary=False,
                                          as_obj=False)
    except MultiKeyError as e:
        results = e.partial_output

    # Compile information to be added to the ontology
    updates = {
        'parent': alert['file']['sha256'],
        'metadata': alert.get('metadata', {}),
        'date': alert['ts'],
        'source_system': config.ui.fqdn,
        'sid': submission['sid'],
        'submitted_classification': submission['classification'],
        'submitter': submission['params']['submitter']
    }

    # Set the list of file names
    fnames = {x['sha256']: [x['name']] for x in submission['files']}

    # Generate ontology files based on the results
    sio = generate_ontology_file(results, user, updates=updates, fnames=fnames)
    data = sio.getvalue()
    return make_file_response(data, f"alert_{alert_id}.ontology", len(data))
Example #5
def get_ontology_for_file(sha256, **kwargs):
    """
    WARNING:
        This API's output is considered stable, but the ontology model itself is still in its
        alpha state. Do not use the results of this API in a production system just yet.

    Get all ontology files for a given file

    Variables:
    sha256      => Hash of the files to fetch ontology files for

    Arguments:
    service     => Only get ontology files for this service, multiple values allowed (optional)
    all         => If there are multiple ontology results for the same file, get them all

    Data Block:
    None

    Result example:      (File where each line is a result ontology record)
    {"header":{"md5":"5fa76...submitter":"admin"}}
    {"header":{"md5":"5fa76...submitter":"admin"}}
    {"header":{"md5":"5fa76...submitter":"admin"}}
    """
    user = kwargs['user']
    services = request.args.getlist('service')
    get_all = request.args.get('all', 'false').lower() in ['true', '']

    # Get file data for hash
    file_data = STORAGE.file.get(sha256, as_obj=False)
    if not file_data:
        return make_api_response(
            "", f"There are not file with this hash: {sha256}", 404)
    if not Classification.is_accessible(user['classification'],
                                        file_data['classification']):
        return make_api_response(
            "",
            f"Your are not allowed get ontology files for this hash: {sha256}",
            403)

    # Generate the queries to get the results
    query = f"id:{sha256}* AND response.supplementary.name:*.ontology"
    filters = []
    if services:
        filters.append(" OR ".join(
            [f'response.service_name:{service}' for service in services]))

    # Get the result keys
    if get_all:
        keys = [
            x['id'] for x in STORAGE.result.stream_search(
                query,
                fl="id",
                filters=filters,
                access_control=user["access_control"],
                as_obj=False)
        ]
    else:
        service_resp = STORAGE.result.grouped_search(
            "response.service_name",
            query=query,
            fl='id',
            filters=filters,
            sort="created desc",
            access_control=user["access_control"],
            as_obj=False)

        keys = [
            k for service in service_resp['items']
            for k in service['items'][0].values()
        ]

    # Pull the results for the keys
    try:
        results = STORAGE.result.multiget(keys,
                                          as_dictionary=False,
                                          as_obj=False)
    except MultiKeyError as e:
        results = e.partial_output

    # Compile information to be added to the ontology
    updates = {
        'date': file_data['seen']['last'],
        'source_system': config.ui.fqdn,
        'submitted_classification': file_data['classification']
    }

    # Generate ontology files based on the results
    sio = generate_ontology_file(results, user, updates=updates)
    data = sio.getvalue()
    return make_file_response(data, f"file_{sha256}.ontology", len(data))