コード例 #1
0
def get_tag_safelist(**_):
    """
    Get the current tag_safelist

    Variables:
    None

    Arguments:
    default    =>  Load the default values that came with the system

    Data Block:
    None

    Result example:
    <current tag_safelist.yml file>
    """
    # '?default' (empty value) or '?default=true' both request the defaults
    use_default = request.args.get('default', 'false').lower() in ('true', '')

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        cached_yml = cache.get('tag_safelist_yml')
        if use_default or not cached_yml:
            # Serve the safelist shipped with the system
            yml_data = forge.get_tag_safelist_data()
            if not yml_data:
                return make_api_response(
                    None, "Could not find the tag_safelist.yml file", 404)
            return make_api_response(yaml.safe_dump(yml_data))

        return make_api_response(safe_str(cached_yml))
コード例 #2
0
def get_identify_custom_yara_file(**_):
    """
    Get identify's current custom Yara file

    Variables:
    None

    Arguments:
    default    =>  Load the default values that came with the system

    Data Block:
    None

    Result example:
    <current custom.yara file>
    """
    # '?default' (empty value) or '?default=true' both request the defaults
    use_default = request.args.get('default', 'false').lower() in ('true', '')

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        custom_yara = cache.get('custom_yara')
        if use_default or not custom_yara:
            # Fall back to the Yara rule file shipped with the system
            with open(constants.YARA_RULE_PATH) as fh:
                return make_api_response(fh.read())

        return make_api_response(custom_yara.decode('utf-8'))
コード例 #3
0
    def _load_magic_file(self):
        """Resolve the magic file path (preferring a custom file from the
        system cachestore) and (re)load the libmagic handles used for
        file-type and mime-type identification."""
        default_magic = '/usr/share/file/magic.mgc'
        self.magic_file = ':'.join((constants.MAGIC_RULE_PATH, default_magic))

        if self.use_cache:
            self.log.info("Checking for custom magic file...")
            with get_cachestore('system',
                                config=self.config,
                                datastore=self.datastore) as cache:
                custom_magic = "/tmp/custom.magic"
                try:
                    cache.download('custom_magic', custom_magic)
                except FileStoreException:
                    self.log.info("No custom magic file found.")
                else:
                    self.magic_file = ':'.join((custom_magic, default_magic))
                    self.log.info("Custom magic file loaded!")

        with self.lock:
            self.file_type = magic.magic_open(magic.MAGIC_CONTINUE +
                                              magic.MAGIC_RAW)
            magic.magic_load(self.file_type, self.magic_file)

            self.mime_type = magic.magic_open(magic.MAGIC_CONTINUE +
                                              magic.MAGIC_RAW +
                                              magic.MAGIC_MIME)
            magic.magic_load(self.mime_type, self.magic_file)
コード例 #4
0
def get_identify_magic_patterns(**_):
    """
    Get identify's magic patterns

    Variables:
    None

    Arguments:
    default    =>  Load the default values that came with the system

    Data Block:
    None

    Result example:
    <current identify's magic patterns>
    """
    # '?default' (empty value) or '?default=true' both request the defaults
    use_default = request.args.get('default', 'false').lower() in ('true', '')

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        custom = cache.get('custom_patterns')
        if use_default or not custom:
            # No custom patterns stored (or defaults requested): built-ins
            return make_api_response(yaml.safe_dump(magic_patterns))

        return make_api_response(custom.decode('utf-8'))
コード例 #5
0
def cachestore(datastore_connection):
    """Build a cachestore seeded with a single known entry for the tests."""
    store = forge.get_cachestore(COMPONENT, datastore=datastore_connection)
    # Start from a clean slate, seed one cached file, then persist it
    store.datastore.cached_file.delete_by_query("id:*")
    store.save(KEY, DATA)
    store.datastore.cached_file.commit()
    return store
コード例 #6
0
def put_identify_magic_patterns(**_):
    """
    Save a new version of identify's magic patterns file

    Variables:
    None

    Arguments:
    None

    Data Block:
    <new magic patterns file>

    Result example:
    {"success": True}
    """
    data = request.json.encode('utf-8')

    try:
        patterns = yaml.safe_load(data)
        # Every pattern needs a valid 'al_type' and a compilable 'regex'
        for entry in patterns:
            if 'al_type' not in entry:
                raise ValueError(
                    f"Missing 'al_type' in pattern: {str(entry)}")

            if not al_re.match(entry['al_type']):
                raise ValueError(
                    f"Invalid 'al_type' in pattern: {str(entry)}")

            if 'regex' not in entry:
                raise ValueError(f"Missing 'regex' in pattern: {str(entry)}")

            try:
                re.compile(entry['regex'])
            except Exception:
                raise ValueError(
                    f"Invalid regular expression in pattern: {str(entry)}")
    except Exception as e:
        # Any validation failure is reported back as a 400
        return make_api_response({'success': False},
                                 err=str(e),
                                 status_code=400)

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        # Submitting the built-in patterns removes the custom override
        if yaml.safe_dump(patterns) == yaml.safe_dump(magic_patterns):
            cache.delete('custom_patterns')
        else:
            cache.save('custom_patterns', data, ttl=ADMIN_FILE_TTL, force=True)

    # Notify components watching to reload magic patterns
    event_sender.send('identify', 'patterns')

    return make_api_response({'success': True})
コード例 #7
0
def flowjs_check_chunk(**kwargs):
    """
    Flowjs check file chunk.

    This API is reserved for the FLOWJS file uploader. It allows FLOWJS
    to check if the file chunk already exists on the server.

    Variables:
    None

    Arguments (REQUIRED):
    flowChunkNumber      => Current chunk number
    flowChunkSize        => Usual size of the chunks
    flowTotalSize        => Total size for the file
    flowFilename         => Original filename
    flowTotalChunks      => Total number of chunks
    flowIdentifier       => File unique identifier
    flowCurrentChunkSize => Size of the current chunk

    Data Block:
    None

    Result example:
    {'exist': True}      # Does the chunk exist on the server?
    """

    flow_chunk_number = request.args.get("flowChunkNumber", None)
    flow_chunk_size = request.args.get("flowChunkSize", None)
    flow_total_size = request.args.get("flowTotalSize", None)
    flow_filename = request.args.get("flowFilename", None)
    flow_total_chunks = request.args.get("flowTotalChunks", None)
    flow_identifier = request.args.get("flowIdentifier", None)
    flow_current_chunk_size = request.args.get("flowCurrentChunkSize", None)

    if not flow_chunk_number or not flow_identifier or not flow_current_chunk_size or not flow_filename \
            or not flow_total_chunks or not flow_chunk_size or not flow_total_size:
        # BUG FIX: the message now lists every argument the check above
        # enforces (flowFilename and flowTotalChunks were missing from it)
        return make_api_response(
            "", "Required arguments missing. flowChunkNumber, flowChunkSize, "
            "flowTotalSize, flowFilename, flowTotalChunks, flowIdentifier "
            "and flowCurrentChunkSize should always be present.", 412)

    filename = get_cache_name(flow_identifier, flow_chunk_number)
    with forge.get_cachestore("flowjs", config) as cache:
        if cache.exists(filename):
            return make_api_response({"exist": True})
        else:
            # 204: the uploader should proceed to send this chunk
            return make_api_response(
                {
                    "exist": False,
                    "msg": "Chunk does not exist, please send it!"
                },
                status_code=204)
コード例 #8
0
def put_identify_custom_yara_file(**_):
    """
    Save a new version of identify's custom Yara file

    Variables:
    None

    Arguments:
    None

    Data Block:
    <current custom.yara file>

    Result example:
    {"success": True}
    """
    data = request.json.encode('utf-8')

    # Write the submitted rules to a temp file so they can be compiled for
    # validation; the temp file is always removed afterwards.
    tmp_path = None
    try:
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp_path = tmp.name
            tmp.write(data)

        try:
            yara.compile(filepaths={"default": tmp_path},
                         externals={'mime': '', 'magic': '', 'type': ''})
        except Exception as e:
            # Rewrite the temp path in the compiler error so the message
            # refers to the user's file instead
            message = str(e).replace(tmp_path, "custom.yara line ")
            return make_api_response(
                {'success': False},
                f"The Yara file you have submitted is invalid: {message}", 400)
    finally:
        if tmp_path and os.path.exists(tmp_path):
            os.unlink(tmp_path)

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        # Identical to the shipped file: remove any custom override instead
        if hashlib.sha256(data).hexdigest() == get_sha256_for_file(
                constants.YARA_RULE_PATH):
            cache.delete('custom_yara')
        else:
            cache.save('custom_yara', data, ttl=ADMIN_FILE_TTL, force=True)

    # Notify components watching to reload yara file
    event_sender.send('identify', 'yara')

    return make_api_response({'success': True})
コード例 #9
0
def put_identify_custom_magic_file(**_):
    """
    Save a new version of identify's custom LibMagic file

    Variables:
    None

    Arguments:
    None

    Data Block:
    <current custom.magic file>

    Result example:
    {"success": True}
    """
    data = request.json.encode('utf-8')

    # Write the submitted rules to a temp file so libmagic can validate
    # them; the temp file is always removed afterwards.
    tmp_path = None
    try:
        with tempfile.NamedTemporaryFile(delete=False) as tmp:
            tmp_path = tmp.name
            tmp.write(data)

        try:
            handle = magic.magic_open(magic.MAGIC_CONTINUE + magic.MAGIC_RAW)
            magic.magic_load(handle, tmp_path)
        except magic.MagicException:
            return make_api_response(
                {'success': False},
                "The magic file you have submitted is invalid.", 400)
    finally:
        if tmp_path and os.path.exists(tmp_path):
            os.unlink(tmp_path)

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        # Identical to the shipped file: remove any custom override instead
        if hashlib.sha256(data).hexdigest() == get_sha256_for_file(
                constants.MAGIC_RULE_PATH):
            cache.delete('custom_magic')
        else:
            cache.save('custom_magic', data, ttl=ADMIN_FILE_TTL, force=True)

    # Notify components watching to reload magic file
    event_sender.send('identify', 'magic')

    return make_api_response({'success': True})
コード例 #10
0
def put_tag_safelist(**_):
    """
    Save a new version of the tag_safelist file

    Variables:
    None

    Arguments:
    None

    Data Block:
    <new tag_safelist.yml file>

    Result example:
    {"success": true}
    """
    tag_safelist_yml = request.json

    try:
        yml_data = yaml.safe_load(tag_safelist_yml)

        # Only the two known top-level sections are allowed
        for top_key in yml_data.keys():
            if top_key not in ['match', 'regex']:
                raise Exception('Invalid key found.')

        # BUG FIX: the original re-validated both sections once per top-level
        # key (and shadowed its loop variable 'key'); validate each section
        # exactly once, tolerating an absent section via .get().
        fields = Tagging.flat_fields()
        for tag_type in ['match', 'regex']:
            for tag, value in yml_data.get(tag_type, {}).items():
                if tag not in fields:
                    raise Exception(f'{tag} is not a valid tag type')

                if not isinstance(value, list):
                    raise Exception(
                        f'Value for {tag} should be a list of strings')
    except Exception as e:
        return make_api_response(
            None, f"Invalid tag_safelist.yml file submitted: {str(e)}", 400)

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        cache.save('tag_safelist_yml',
                   tag_safelist_yml.encode('utf-8'),
                   ttl=ADMIN_FILE_TTL,
                   force=True)

    return make_api_response({'success': True})
コード例 #11
0
def put_identify_trusted_mimetypes(**_):
    """
    Save a new version of identify's trusted mimetypes file

    Variables:
    None

    Arguments:
    None

    Data Block:
    <new trusted mimetypes file>

    Result example:
    {"success": True}
    """
    data = request.json.encode('utf-8')

    try:
        mimes = yaml.safe_load(data)
        # Both sides of each mapping entry must be valid type strings
        for mime, al_type in mimes.items():
            if not isinstance(mime, str) or not al_re.match(mime):
                raise ValueError(
                    f"Invalid mimetype in item: [{mime}: {al_type}]")

            if not isinstance(al_type, str) or not al_re.match(al_type):
                raise ValueError(
                    f"Invalid AL type in item [{mime}: {al_type}]")

    except Exception as e:
        return make_api_response({'success': False},
                                 err=str(e),
                                 status_code=400)

    with forge.get_cachestore('system', config=config,
                              datastore=STORAGE) as cache:
        # Submitting the built-in map removes the custom override
        if yaml.safe_dump(mimes) == yaml.safe_dump(trusted_mimes):
            cache.delete('custom_mimes')
        else:
            cache.save('custom_mimes', data, ttl=ADMIN_FILE_TTL, force=True)

    # Notify components watching to reload trusted mimes
    event_sender.send('identify', 'mimes')

    return make_api_response({'success': True})
コード例 #12
0
    def get_safelisted_tags(self, tag_types):
        """Return the tag safelist merged with enabled 'tag' safelist items.

        The base data comes from the cached tag_safelist_yml when present,
        otherwise from the defaults shipped with the system. tag_types may
        be a comma-separated string or a list of tag types to filter on; a
        falsy value returns every tag type.
        """
        if isinstance(tag_types, str):
            tag_types = tag_types.split(',')

        with forge.get_cachestore('system',
                                  config=self.config,
                                  datastore=self.datastore) as cache:
            tag_safelist_yml = cache.get('tag_safelist_yml')
            if tag_safelist_yml:
                tag_safelist_data = yaml.safe_load(tag_safelist_yml)
            else:
                tag_safelist_data = forge.get_tag_safelist_data()

        if tag_types:
            # BUG FIX: the original also tested 'tag_types == []' inside
            # these comprehensions, which is always False on this branch
            # (tag_types is truthy here) — dead condition removed.
            output = {
                'match': {
                    k: v
                    for k, v in tag_safelist_data.get('match', {}).items()
                    if k in tag_types
                },
                'regex': {
                    k: v
                    for k, v in tag_safelist_data.get('regex', {}).items()
                    if k in tag_types
                },
            }
            # Merge in enabled datastore safelist entries for each tag type
            for tag in tag_types:
                for sl in self.datastore.safelist.stream_search(
                        f"type:tag AND enabled:true AND tag.type:{tag}",
                        as_obj=False):
                    output['match'].setdefault(sl['tag']['type'], [])
                    output['match'][sl['tag']['type']].append(
                        sl['tag']['value'])

        else:
            output = tag_safelist_data
            for sl in self.datastore.safelist.stream_search(
                    "type:tag AND enabled:true", as_obj=False):
                output['match'].setdefault(sl['tag']['type'], [])
                output['match'][sl['tag']['type']].append(sl['tag']['value'])

        return output
コード例 #13
0
    def _load_yara_file(self):
        """Load the Yara identification rules, preferring a custom file
        from the system cachestore over the one shipped with the system."""
        self.yara_file = constants.YARA_RULE_PATH

        if self.use_cache:
            self.log.info("Checking for custom yara file...")
            with get_cachestore('system',
                                config=self.config,
                                datastore=self.datastore) as cache:
                try:
                    custom_yara = "/tmp/custom.yara"
                    cache.download('custom_yara', custom_yara)
                    self.yara_file = custom_yara
                    self.log.info("Custom yara file loaded!")
                except FileStoreException:
                    # BUG FIX: message said "magic" (copy/paste error)
                    self.log.info("No custom yara file found.")

        # Compile outside the lock; only the final swap is serialized
        yara_rules = yara.compile(filepaths={"default": self.yara_file},
                                  externals=self.yara_default_externals)
        with self.lock:
            self.yara_rules = yara_rules
コード例 #14
0
    def _load_trusted_mimes(self):
        """Load the trusted mimetype map, preferring a custom version from
        the system cachestore over the built-in defaults."""
        trusted_mimes = default_trusted_mimes

        if self.use_cache:
            self.log.info("Checking for custom trusted mimes...")
            with get_cachestore('system',
                                config=self.config,
                                datastore=self.datastore) as cache:
                try:
                    mimes = cache.get('custom_mimes')
                    if mimes:
                        trusted_mimes = yaml.safe_load(mimes)
                        self.log.info("Custom trusted mimes loaded!")
                    else:
                        # BUG FIX: message said "magic patterns" (copy/paste)
                        self.log.info("No custom trusted mimes found.")
                except FileStoreException:
                    self.log.info("No custom trusted mimes found.")

        with self.lock:
            self.trusted_mimes = trusted_mimes
コード例 #15
0
    def _load_magic_patterns(self):
        """Load and compile the magic patterns, preferring a custom set
        from the system cachestore over the built-in defaults."""
        self.magic_patterns = default_magic_patterns

        if self.use_cache:
            self.log.info("Checking for custom magic patterns...")
            with get_cachestore('system',
                                config=self.config,
                                datastore=self.datastore) as cache:
                try:
                    cached = cache.get('custom_patterns')
                except FileStoreException:
                    cached = None
                if cached:
                    self.magic_patterns = yaml.safe_load(cached)
                    self.log.info("Custom magic patterns loaded!")
                else:
                    self.log.info("No custom magic patterns found.")

        # Pre-compile every regex (case-insensitive) outside the lock
        compiled = [[p['al_type'],
                     re.compile(p['regex'], re.IGNORECASE)]
                    for p in self.magic_patterns]
        with self.lock:
            self.compiled_magic_patterns = compiled
コード例 #16
0
ファイル: signature.py プロジェクト: rodadmin/assemblyline-ui
def download_signatures(**kwargs):
    """
    Download signatures from the system.

    Variables:
    None

    Arguments:
    query       => Query used to filter the signatures
                   Default: All deployed signatures

    Data Block:
    None

    Result example:
    <A zip file containing all signatures files from the different sources>
    """
    user = kwargs['user']
    query = request.args.get('query', 'status:DEPLOYED')

    access = user['access_control']
    last_modified = STORAGE.get_signature_last_modified()

    # Cache key covers the query, the caller's access control and the last
    # signature modification time, so any change naturally invalidates it.
    query_hash = sha256(
        f'{query}.{access}.{last_modified}'.encode('utf-8')).hexdigest()

    with forge.get_cachestore('al_ui.signature') as signature_cache:
        # Fast path: serve a previously built zip if one is cached
        response = _get_cached_signatures(signature_cache, query_hash)
        if response:
            return response

        # Double-checked caching under a lock: only one worker builds the
        # zip; others re-check the cache after acquiring the lock.
        with Lock(f"al_signatures_{query_hash[:7]}.zip", 30):
            response = _get_cached_signatures(signature_cache, query_hash)
            if response:
                return response

            # Maps "<type>/<source>" -> list of signature bodies
            output_files = {}

            keys = [
                k['id'] for k in STORAGE.signature.stream_search(
                    query, fl="id", access_control=access, as_obj=False)
            ]
            # Sort by 'order' so the concatenation is deterministic
            signature_list = sorted(STORAGE.signature.multiget(
                keys, as_dictionary=False, as_obj=False),
                                    key=lambda x: x['order'])

            for sig in signature_list:
                out_fname = f"{sig['type']}/{sig['source']}"
                output_files.setdefault(out_fname, [])
                output_files[out_fname].append(sig['data'])

            # One zip entry per type/source, signatures joined by blank lines
            output_zip = InMemoryZip()
            for fname, data in output_files.items():
                output_zip.append(fname, "\n\n".join(data))

            rule_file_bin = output_zip.read()

            # Cache the built zip for subsequent identical requests
            signature_cache.save(query_hash,
                                 rule_file_bin,
                                 ttl=DEFAULT_CACHE_TTL)

            return make_file_response(rule_file_bin,
                                      f"al_signatures_{query_hash[:7]}.zip",
                                      len(rule_file_bin),
                                      content_type="application/zip")
コード例 #17
0
def start_ui_submission(ui_sid, **kwargs):
    """
    Start UI submission.

    Starts processing after files were uploaded to the server.

    Variables:
    ui_sid     => UUID for the current UI file upload

    Arguments:
    None

    Data Block (REQUIRED):
    Dictionary of UI specific user settings

    Result example:
    {
     'started': True,                    # Has the submission started processing?
     'sid' : "c7668cfa-...-c4132285142e" # Submission ID
    }
    """
    user = kwargs['user']

    ui_params = request.json
    ui_params['groups'] = kwargs['user']['groups']
    ui_params['quota_item'] = True
    ui_params['submitter'] = user['uname']

    if not Classification.is_accessible(user['classification'],
                                        ui_params['classification']):
        return make_api_response({
            "started": False,
            "sid": None
        }, "You cannot start a scan with higher "
                                 "classification then you're allowed to see",
                                 403)

    quota_error = check_submission_quota(user)
    if quota_error:
        return make_api_response("", quota_error, 503)

    submit_result = None
    submitted_file = None
    # BUG FIX: target_dir must be defined before the try block, otherwise
    # the finally clause raised NameError when the cached file was missing.
    target_dir = None

    try:
        # Download the file from the cache
        with forge.get_cachestore("flowjs", config) as cache:
            ui_sid = get_cache_name(ui_sid)
            if cache.exists(ui_sid):
                target_dir = os.path.join(TEMP_DIR, ui_sid)
                os.makedirs(target_dir, exist_ok=True)

                target_file = os.path.join(target_dir,
                                           ui_params.pop('filename', ui_sid))

                if os.path.exists(target_file):
                    os.unlink(target_file)

                # Save the reconstructed file
                cache.download(ui_sid, target_file)
                submitted_file = target_file

        # Submit the file
        if submitted_file is not None:
            with open(submitted_file, 'rb') as fh:
                # CaRT-wrapped AL bundles are re-imported, not re-scanned
                if is_cart(fh.read(256)):
                    meta = get_metadata_only(submitted_file)
                    if meta.get('al',
                                {}).get('type',
                                        'unknown') == 'archive/bundle/al':
                        try:
                            submission = import_bundle(submitted_file,
                                                       allow_incomplete=True,
                                                       identify=IDENTIFY)
                        except Exception as e:
                            return make_api_response("",
                                                     err=str(e),
                                                     status_code=400)
                        return make_api_response({
                            "started": True,
                            "sid": submission['sid']
                        })

            if not ui_params['description']:
                ui_params[
                    'description'] = f"Inspection of file: {os.path.basename(submitted_file)}"

            # Submit to dispatcher
            try:
                params = ui_to_submission_params(ui_params)

                # Enforce maximum DTL
                if config.submission.max_dtl > 0:
                    params['ttl'] = min(int(
                        params['ttl']), config.submission.max_dtl) if int(
                            params['ttl']) else config.submission.max_dtl

                submission_obj = Submission({"files": [], "params": params})
            except (ValueError, KeyError) as e:
                return make_api_response("", err=str(e), status_code=400)

            try:
                submit_result = SubmissionClient(
                    datastore=STORAGE,
                    filestore=FILESTORE,
                    config=config,
                    identify=IDENTIFY).submit(submission_obj,
                                              local_files=[submitted_file])
                submission_received(submission_obj)
            except SubmissionException as e:
                return make_api_response("", err=str(e), status_code=400)

            return make_api_response({
                "started": True,
                "sid": submit_result.sid
            })
        else:
            return make_api_response({
                "started": False,
                "sid": None
            }, "No files where found for ID %s. "
                                     "Try again..." % ui_sid, 404)
    finally:
        # A submission that never started must not consume quota
        if submit_result is None:
            decrement_submission_quota(user)

        # Remove file (BUG FIX: submitted_file may still be None here,
        # and os.path.exists(None) raises TypeError)
        if submitted_file and os.path.exists(submitted_file):
            os.unlink(submitted_file)

        # Remove dir (BUG FIX: guarded against target_dir never being set)
        if target_dir and os.path.exists(target_dir) and os.path.isdir(
                target_dir):
            os.rmdir(target_dir)
コード例 #18
0
def flowjs_upload_chunk(**kwargs):
    """
    Flowjs upload file chunk.

    This API is reserved for the FLOWJS file uploader. It allows
    FLOWJS to upload a file chunk to the server.

    Variables:
    None

    Arguments (REQUIRED):
    flowChunkNumber      => Current chunk number
    flowChunkSize        => Usual size of the chunks
    flowCurrentChunkSize => Size of the current chunk
    flowTotalSize        => Total size for the file
    flowIdentifier       => File unique identifier
    flowFilename         => Original filename
    flowRelativePath     => Relative path of the file on the client
    flowTotalChunks      => Total number of chunks

    Data Block:
    None

    Result example:
    {
     'success': True,     # Did the upload succeed?
     'completed': False   # Are all chunks received by the server?
     }
    """

    flow_chunk_number = request.form.get("flowChunkNumber", None)
    flow_chunk_size = request.form.get("flowChunkSize", None)
    flow_current_chunk_size = request.form.get("flowCurrentChunkSize", None)
    flow_total_size = request.form.get("flowTotalSize", None)
    flow_identifier = request.form.get("flowIdentifier", None)
    flow_filename = safe_str(request.form.get("flowFilename", None))
    flow_relative_path = request.form.get("flowRelativePath", None)
    flow_total_chunks = request.form.get("flowTotalChunks", None)
    completed = True

    if not flow_chunk_number or not flow_chunk_size or not flow_current_chunk_size or not flow_total_size \
            or not flow_identifier or not flow_filename or not flow_relative_path or not flow_total_chunks:
        return make_api_response(
            "", "Required arguments missing. flowChunkNumber, flowChunkSize, "
            "flowCurrentChunkSize, flowTotalSize, flowIdentifier, flowFilename, "
            "flowRelativePath and flowTotalChunks should always be present.",
            412)

    filename = get_cache_name(flow_identifier, flow_chunk_number)

    with forge.get_cachestore("flowjs", config) as cache:
        file_obj = request.files['file']
        cache.save(filename, file_obj.stream.read())

        # Test in reverse order to fail fast
        for chunk in range(int(flow_total_chunks), 0, -1):
            chunk_name = get_cache_name(flow_identifier, chunk)
            if not cache.exists(chunk_name):
                completed = False
                break

        if completed:
            # Reconstruct the file from its cached chunks
            ui_sid = get_cache_name(flow_identifier)
            target_file = os.path.join(TEMP_DIR, ui_sid)
            # exist_ok replaces the original try/except-pass around makedirs
            os.makedirs(TEMP_DIR, exist_ok=True)

            # "wb" truncates any stale file (no explicit unlink needed) and
            # opening once avoids re-opening the file for every chunk
            with open(target_file, "wb") as out:
                for chunk in range(int(flow_total_chunks)):
                    chunk_name = get_cache_name(flow_identifier, chunk + 1)
                    out.write(cache.get(chunk_name))
                    cache.delete(chunk_name)

            # Save the reconstructed file
            with open(target_file, "rb") as t:
                cache.save(ui_sid, t.read())

            os.unlink(target_file)

    return make_api_response({'success': True, 'completed': completed})