Example #1
    def test_simple(self):
        plaintext = b'0123456789' * 10000000

        pt_stream = BytesIO(plaintext)

        ct_stream = BytesIO()

        # Pack the plaintext into the CaRT format, with header and footer metadata
        cart.pack_stream(pt_stream, ct_stream, {'name': 'hello.txt'},
                         {'digest': 'done'})

        crypt_text = ct_stream.getvalue()
        ct_stream = BytesIO(crypt_text)
        pt_stream = BytesIO()

        # Persist the CaRTed bytes so get_metadata_only() can be tested on disk
        temp_file = tempfile.mkstemp()[1]
        with open(temp_file, 'wb') as f:
            f.write(ct_stream.getvalue())

        # Unpack and merge the returned header/footer metadata
        (header, footer) = cart.unpack_stream(ct_stream, pt_stream)
        inline_metadata = {}
        if header:
            inline_metadata.update(header)

        if footer:
            inline_metadata.update(footer)

        # The round trip must preserve both the payload and the metadata
        plaintext_prime = pt_stream.getvalue()
        self.assertEqual(plaintext_prime, plaintext)

        metadata = cart.get_metadata_only(temp_file)
        self.assertEqual(metadata, inline_metadata)
        self.assertTrue(cart.is_cart(crypt_text))
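For reference, the round trip above boils down to a few calls. A minimal sketch against the same cart API (pack_stream, unpack_stream, is_cart), assuming the cart package is importable and that header metadata comes back as it was passed in:

from io import BytesIO

import cart

# Pack a small payload with header metadata
pt_in, ct = BytesIO(b'hello world'), BytesIO()
cart.pack_stream(pt_in, ct, {'name': 'hello.txt'})

# Unpack it again; unpack_stream returns (header, footer) metadata dicts
ct.seek(0)
pt_out = BytesIO()
header, footer = cart.unpack_stream(ct, pt_out)

assert cart.is_cart(ct.getvalue())
assert pt_out.getvalue() == b'hello world'
assert header.get('name') == 'hello.txt'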
Example #2
def decode_file(original_path, fileinfo):
    extracted_path = None
    hdr = {}
    with open(original_path, 'rb') as original_file:
        if is_cart(original_file.read(256)):
            original_file.seek(0)

            extracted_fd, extracted_path = tempfile.mkstemp()
            extracted_file = os.fdopen(extracted_fd, 'wb')

            cart_extracted = False
            try:
                hdr, _ = unpack_stream(original_file, extracted_file)
                cart_extracted = True

            except Exception:
                extracted_path = None
                hdr = {}
                fileinfo['type'] = 'corrupted/cart'

            finally:
                extracted_file.close()

            if cart_extracted:
                fileinfo = identify.fileinfo(extracted_path)

    return extracted_path, fileinfo, hdr
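A hypothetical call showing the contract of decode_file: for a CaRT file it returns a temporary path to the unpacked payload, refreshed fileinfo, and the CaRT header; otherwise extracted_path stays None. The path and the initial fileinfo dict below are placeholders:

fileinfo = {'type': 'unknown'}  # placeholder, normally produced by identify.fileinfo()
extracted_path, fileinfo, hdr = decode_file('/tmp/sample', fileinfo)

if extracted_path is not None:
    print('unpacked to', extracted_path, 'with header', hdr)
elif fileinfo['type'] == 'corrupted/cart':
    print('CaRT file failed to unpack')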
Example #3
def test_alert_create_bundle(datastore, login_session):
    _, session, host = login_session

    resp = get_api_data(session,
                        f"{host}/api/v4/bundle/{ALERT_ID}/?use_alert",
                        raw=True)
    assert is_cart(resp[:256])
Example #4
def test_create_bundle(datastore, login_session):
    _, session, host = login_session

    sid = random.choice(
        datastore.submission.search('id:*', rows=100,
                                    as_obj=False)['items'])['sid']
    resp = get_api_data(session, f"{host}/api/v4/bundle/{sid}/", raw=True)
    assert is_cart(resp[:256])
Example #5
def encode_file(input_path, name, metadata=None):
    if metadata is None:
        metadata = {}

    _, output_path = tempfile.mkstemp()

    with open(output_path, 'wb') as oh:
        with open(input_path, 'rb') as ih:
            data = ih.read(64)
            if not is_cart(data):
                ih.seek(0)
                metadata.update({'name': name})
                pack_stream(ih, oh, metadata)
                return output_path, f"{name}.cart"
            else:
                return input_path, name
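Usage is symmetric to decode_file above; a sketch with placeholder arguments, relying only on the behaviour shown: plain files come back CaRTed under a new temporary path, while already-CaRTed inputs are returned untouched:

out_path, out_name = encode_file('/tmp/sample.bin', 'sample.bin',
                                 metadata={'source': 'demo'})  # placeholder metadata
# out_name == 'sample.bin.cart' for a plain input,
# or 'sample.bin' (with out_path == input_path) if it was already CaRT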
Example #6
def test_alert_bundle(datastore_connection, filestore, config):
    # Cleanup previous runs
    datastore_connection.alert.delete(ALERT_ID)

    # Create a temporary submission
    submission = create_submission(datastore_connection, filestore)
    sid = submission['sid']

    # Create a random alert
    alert = random_model_obj(Alert)
    alert.alert_id = ALERT_ID
    alert.sid = sid
    datastore_connection.alert.save(ALERT_ID, alert)

    # Create the submission's bundle
    path = create_bundle(ALERT_ID, use_alert=True)

    # Test if the bundle was created
    assert os.path.exists(path)
    with open(path, 'rb') as fh:
        assert is_cart(fh.read(256))

    # Remove alert and submission from DB
    datastore_connection.alert.delete(alert.alert_id)
    datastore_connection.delete_submission_tree(sid, transport=filestore)
    assert datastore_connection.alert.get_if_exists(alert.alert_id) is None
    assert datastore_connection.submission.get_if_exists(sid) is None

    # Restore bundle
    new_submission = import_bundle(path)

    # Validate restored submission
    assert new_submission['sid'] == sid
    assert new_submission['metadata']['bundle.source'] == config.ui.fqdn

    # Validate restored alert
    new_alert = datastore_connection.alert.get_if_exists(alert.alert_id, as_obj=False)
    assert new_alert['alert_id'] == ALERT_ID
    assert new_alert['sid'] == sid
    assert new_alert['metadata']['bundle.source'] == config.ui.fqdn

    # Cleanup
    assert not os.path.exists(path)
    datastore_connection.alert.delete(alert.alert_id)
    datastore_connection.delete_submission_tree(sid, transport=filestore)
    assert datastore_connection.alert.get_if_exists(alert.alert_id) is None
    assert datastore_connection.submission.get_if_exists(sid) is None
Example #7
def import_bundle(**_):
    """
    Import a bundle file into the system

    Variables:
    None

    Arguments:
    allow_incomplete        => Allow importing an incomplete submission
    min_classification      => Minimum classification that the files and results from the bundle should get

    Data Block:
    The bundle file to import

    Result example:
    {"success": true}
    """
    min_classification = request.args.get('min_classification', Classification.UNRESTRICTED)
    allow_incomplete = request.args.get('allow_incomplete', 'true').lower() == 'true'

    current_bundle = os.path.join(BUNDLING_DIR, f"{get_random_id()}.bundle")

    with open(current_bundle, 'wb') as fh:
        if request.data[:3] == BUNDLE_MAGIC or is_cart(request.data[:256]):
            fh.write(request.data)
        else:
            try:
                fh.write(base64.b64decode(request.data))
            except binascii.Error:
                fh.write(request.data)

    try:
        bundle_import(current_bundle, working_dir=BUNDLING_DIR, min_classification=min_classification,
                      allow_incomplete=allow_incomplete)
        return make_api_response({'success': True})
    except InvalidClassification as ice:
        return make_api_response({'success': False}, err=str(ice), status_code=400)
    except SubmissionAlreadyExist as sae:
        return make_api_response({'success': False}, err=str(sae), status_code=409)
    except (IncompleteBundle, BundlingException) as b:
        return make_api_response({'success': False}, err=str(b), status_code=400)
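The body-handling logic above (accept a raw bundle, a CaRT file, or a base64-encoded upload) can be read as a small pure function. A sketch; normalize_bundle_body is a hypothetical name, and bundle_magic stands in for the module's BUNDLE_MAGIC constant:

import base64
import binascii

from cart import is_cart

def normalize_bundle_body(data: bytes, bundle_magic: bytes) -> bytes:
    """Return raw bundle bytes, transparently decoding base64 uploads."""
    if data[:len(bundle_magic)] == bundle_magic or is_cart(data[:256]):
        return data                    # already a raw bundle or a CaRT file
    try:
        return base64.b64decode(data)  # possibly a base64-encoded upload
    except binascii.Error:
        return data                    # fall back to writing the raw bytes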
Example #8
def test_alert_no_submission_bundle(datastore_connection, config):
    # Cleanup previous runs
    datastore_connection.alert.delete(ALERT_ID)
    datastore_connection.submission.delete(SUBMISSION_ID)

    # Create a random alert
    alert = random_model_obj(Alert)
    alert.alert_id = ALERT_ID
    alert.sid = SUBMISSION_ID
    datastore_connection.alert.save(ALERT_ID, alert)

    # Create the submission's bundle
    path = create_bundle(ALERT_ID, use_alert=True)

    # Test if the bundle was created
    assert os.path.exists(path)
    with open(path, 'rb') as fh:
        assert is_cart(fh.read(256))

    # Remove alert from Datastore
    datastore_connection.alert.delete(alert.alert_id)
    assert datastore_connection.alert.get_if_exists(alert.alert_id) is None
    assert datastore_connection.submission.get_if_exists(alert.sid) is None

    # Restore bundle
    new_submission = import_bundle(path)

    # Validate restored submission
    assert new_submission is None

    # Validate restored alert
    new_alert = datastore_connection.alert.get_if_exists(alert.alert_id)
    assert new_alert['alert_id'] == ALERT_ID
    assert new_alert['sid'] == SUBMISSION_ID
    assert new_alert['metadata']['bundle.source'] == config.ui.fqdn

    # Cleanup
    datastore_connection.alert.delete(ALERT_ID)
    datastore_connection.submission.delete(SUBMISSION_ID)
Example #9
def test_submission_bundle(datastore_connection, filestore, config):
    # Create a temporary submission
    submission = create_submission(datastore_connection, filestore)
    sid = submission['sid']

    # Create the submission's bundle
    path = create_bundle(sid)

    # Test if the bundle was created
    assert os.path.exists(path)
    with open(path, 'rb') as fh:
        assert is_cart(fh.read(256))

    # Remove submission from DB
    datastore_connection.delete_submission_tree(sid, transport=filestore)
    assert datastore_connection.submission.get_if_exists(sid) is None

    # Restore bundle
    new_submission = import_bundle(path, cleanup=False)

    # Validate restored submission
    assert new_submission['sid'] == sid
    assert new_submission['metadata']['bundle.source'] == config.ui.fqdn

    # Test inserting failure
    with pytest.raises(SubmissionAlreadyExist):
        import_bundle(path, cleanup=False)

    # Test skip failure on exist
    new_submission = import_bundle(path, exist_ok=True)

    # Validate restored submission
    assert new_submission['sid'] == sid
    assert new_submission['metadata']['bundle.source'] == config.ui.fqdn

    # Cleanup
    assert not os.path.exists(path)
    datastore_connection.delete_submission_tree(sid, transport=filestore)
    assert datastore_connection.submission.get_if_exists(sid) is None
Example #10
def import_bundle(**_):
    """
    Import a bundle file into the system

    Variables:
    None

    Arguments:
    allow_incomplete        => Allow importing an incomplete submission
    rescan_services         => Comma separated list of services to rescan after importing the bundle
    min_classification      => Minimum classification that the files and results from the bundle should get
    exist_ok                => Do not fail if the submission already exists

    Data Block:
    The bundle file to import

    Result example:
    {"success": true}
    """
    allow_incomplete = request.args.get('allow_incomplete',
                                        'false').lower() in ['true', '']
    exist_ok = request.args.get('exist_ok', 'false').lower() in ['true', '']
    min_classification = request.args.get('min_classification',
                                          Classification.UNRESTRICTED)
    rescan_services = request.args.get('rescan_services', None)

    if rescan_services is not None:
        rescan_services = rescan_services.split(',')

    current_bundle = os.path.join(BUNDLING_DIR, f"{get_random_id()}.bundle")

    with open(current_bundle, 'wb') as fh:
        if request.data[:3] == BUNDLE_MAGIC or is_cart(request.data[:256]):
            fh.write(request.data)
        else:
            try:
                fh.write(base64.b64decode(request.data))
            except binascii.Error:
                fh.write(request.data)

    try:
        bundle_import(current_bundle,
                      working_dir=BUNDLING_DIR,
                      min_classification=min_classification,
                      allow_incomplete=allow_incomplete,
                      rescan_services=rescan_services,
                      exist_ok=exist_ok,
                      identify=IDENTIFY)

        return make_api_response({'success': True})
    except SubmissionException as se:
        return make_api_response({'success': False},
                                 err=str(se),
                                 status_code=409)
    except InvalidClassification as ice:
        return make_api_response({'success': False},
                                 err=str(ice),
                                 status_code=400)
    except SubmissionAlreadyExist as sae:
        return make_api_response({'success': False},
                                 err=str(sae),
                                 status_code=409)
    except (IncompleteBundle, BundlingException) as b:
        return make_api_response({'success': False},
                                 err=str(b),
                                 status_code=400)
Example #11
def start_ui_submission(ui_sid, **kwargs):
    """
    Start UI submission.

    Starts processing after files were uploaded to the server.

    Variables:
    ui_sid     => UUID for the current UI file upload

    Arguments:
    None

    Data Block (REQUIRED):
    Dictionary of UI specific user settings

    Result example:
    {
     'started': True,                    # Has the submission started processing?
     'sid' : "c7668cfa-...-c4132285142e" # Submission ID
    }
    """
    user = kwargs['user']

    ui_params = request.json
    ui_params['groups'] = kwargs['user']['groups']
    ui_params['quota_item'] = True
    ui_params['submitter'] = user['uname']

    if not Classification.is_accessible(user['classification'],
                                        ui_params['classification']):
        return make_api_response(
            {"started": False, "sid": None},
            "You cannot start a scan with a higher "
            "classification than you're allowed to see", 403)

    quota_error = check_submission_quota(user)
    if quota_error:
        return make_api_response("", quota_error, 503)

    # Initialized up front so the finally block can safely reference them
    submit_result = None
    submitted_file = None
    target_dir = None

    try:
        # Download the file from the cache
        with forge.get_cachestore("flowjs", config) as cache:
            ui_sid = get_cache_name(ui_sid)
            if cache.exists(ui_sid):
                target_dir = os.path.join(TEMP_DIR, ui_sid)
                os.makedirs(target_dir, exist_ok=True)

                target_file = os.path.join(target_dir,
                                           ui_params.pop('filename', ui_sid))

                if os.path.exists(target_file):
                    os.unlink(target_file)

                # Save the reconstructed file
                cache.download(ui_sid, target_file)
                submitted_file = target_file

        # Submit the file
        if submitted_file is not None:
            with open(submitted_file, 'rb') as fh:
                if is_cart(fh.read(256)):
                    meta = get_metadata_only(submitted_file)
                    if meta.get('al', {}).get('type', 'unknown') == 'archive/bundle/al':
                        try:
                            submission = import_bundle(submitted_file,
                                                       allow_incomplete=True,
                                                       identify=IDENTIFY)
                        except Exception as e:
                            return make_api_response("",
                                                     err=str(e),
                                                     status_code=400)
                        return make_api_response({
                            "started": True,
                            "sid": submission['sid']
                        })

            if not ui_params['description']:
                ui_params['description'] = f"Inspection of file: {os.path.basename(submitted_file)}"

            # Submit to dispatcher
            try:
                params = ui_to_submission_params(ui_params)

                # Enforce maximum DTL
                if config.submission.max_dtl > 0:
                    params['ttl'] = (min(int(params['ttl']), config.submission.max_dtl)
                                     if int(params['ttl']) else config.submission.max_dtl)

                submission_obj = Submission({"files": [], "params": params})
            except (ValueError, KeyError) as e:
                return make_api_response("", err=str(e), status_code=400)

            try:
                submit_result = SubmissionClient(
                    datastore=STORAGE,
                    filestore=FILESTORE,
                    config=config,
                    identify=IDENTIFY).submit(submission_obj,
                                              local_files=[submitted_file])
                submission_received(submission_obj)
            except SubmissionException as e:
                return make_api_response("", err=str(e), status_code=400)

            return make_api_response({
                "started": True,
                "sid": submit_result.sid
            })
        else:
            return make_api_response(
                {"started": False, "sid": None},
                "No files were found for ID %s. Try again..." % ui_sid, 404)
    finally:
        if submit_result is None:
            decrement_submission_quota(user)

        # Remove file (may be None if nothing was found in the cache)
        if submitted_file and os.path.exists(submitted_file):
            os.unlink(submitted_file)

        # Remove dir
        if target_dir and os.path.isdir(target_dir):
            os.rmdir(target_dir)
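The TTL clamp buried in the parameter handling above is easier to follow as a pure function. A sketch; clamp_ttl is a hypothetical name:

def clamp_ttl(ttl: int, max_dtl: int) -> int:
    """Cap a requested TTL at max_dtl; a falsy TTL means 'use the maximum'."""
    if max_dtl <= 0:
        return ttl  # no site-wide cap configured
    return min(ttl, max_dtl) if ttl else max_dtl

assert clamp_ttl(0, 30) == 30   # unset TTL falls back to the cap
assert clamp_ttl(90, 30) == 30  # oversized TTL is clamped
assert clamp_ttl(15, 30) == 15  # small TTL passes through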
Example #12
def import_bundle(path, working_dir=WORK_DIR, min_classification=Classification.UNRESTRICTED, allow_incomplete=False):
    with forge.get_datastore(archive_access=True) as datastore:
        current_working_dir = os.path.join(working_dir, get_random_id())
        res_file = os.path.join(current_working_dir, "results.json")
        try:
            os.makedirs(current_working_dir)
        except Exception:
            pass

        with open(path, 'rb') as original_file:
            if is_cart(original_file.read(256)):
                original_file.seek(0)

                extracted_fd, extracted_path = tempfile.mkstemp()
                extracted_file = os.fdopen(extracted_fd, 'wb')

                try:
                    hdr, _ = unpack_stream(original_file, extracted_file)
                    if hdr.get('al', {}).get('type', 'unknown') != BUNDLE_TYPE:
                        raise BundlingException(f"Not a valid CaRTed bundle, should be of type: {BUNDLE_TYPE}")
                finally:
                    extracted_file.close()
            else:
                extracted_path = path

        # Extract the bundle
        try:
            subprocess.check_call(["tar", "-zxf", extracted_path, "-C", current_working_dir])
        except subprocess.CalledProcessError:
            raise BundlingException("Bundle decompression failed. Not a valid bundle...")

        with open(res_file, 'rb') as fh:
            data = json.load(fh)

        submission = data['submission']
        results = data['results']
        files = data['files']
        errors = data['errors']

        try:
            sid = submission['sid']
            # Check if we have all the service results
            for res_key in submission['results']:
                if res_key not in results['results'].keys() and not allow_incomplete:
                    raise IncompleteBundle("Incomplete results in bundle. Skipping %s..." % sid)

            # Check if we have all files
            for sha256 in list(set([x[:64] for x in submission['results']])):
                if sha256 not in files['infos'].keys() and not allow_incomplete:
                    raise IncompleteBundle("Incomplete files in bundle. Skipping %s..." % sid)

            # Check if we have all the errors
            for err_key in submission['errors']:
                if err_key not in errors['errors'].keys() and not allow_incomplete:
                    raise IncompleteBundle("Incomplete errors in bundle. Skipping %s..." % sid)

            if datastore.submission.get(sid, as_obj=False):
                raise SubmissionAlreadyExist("Submission %s already exists." % sid)

            # Make sure bundle's submission meets minimum classification and save the submission
            submission['classification'] = Classification.max_classification(submission['classification'],
                                                                             min_classification)
            submission.update(Classification.get_access_control_parts(submission['classification']))
            datastore.submission.save(sid, submission)

            # Make sure files meet minimum classification and save the files
            with forge.get_filestore() as filestore:
                for f, f_data in files['infos'].items():
                    f_classification = Classification.max_classification(f_data['classification'], min_classification)
                    datastore.save_or_freshen_file(f, f_data, f_data['expiry_ts'], f_classification,
                                                   cl_engine=Classification)
                    try:
                        filestore.upload(os.path.join(current_working_dir, f), f)
                    except IOError:
                        pass

            # Make sure results meet minimum classification and save the results
            for key, res in results['results'].items():
                if key.endswith(".e"):
                    datastore.emptyresult.save(key, {"expiry_ts": res['expiry_ts']})
                else:
                    res['classification'] = Classification.max_classification(res['classification'], min_classification)
                    datastore.result.save(key, res)

            # Make sure errors meet minimum classification and save the errors
            for ekey, err in errors['errors'].items():
                datastore.error.save(ekey, err)

            return submission
        finally:
            try:
                os.remove(extracted_path)
            except Exception:
                pass

            try:
                os.remove(path)
            except Exception:
                pass

            try:
                shutil.rmtree(current_working_dir, ignore_errors=True)
            except Exception:
                pass
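A hypothetical invocation of the function above (the path is a placeholder). Note that it consumes its input: both the bundle file and any extracted temporary file are removed in the finally block:

submission = import_bundle('/tmp/demo.bundle',
                           min_classification=Classification.UNRESTRICTED,
                           allow_incomplete=True)
print(submission['sid'])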
Example #13
def import_bundle(path,
                  working_dir=WORK_DIR,
                  min_classification=Classification.UNRESTRICTED,
                  allow_incomplete=False,
                  rescan_services=None,
                  exist_ok=False,
                  cleanup=True,
                  identify=None):
    with forge.get_datastore(archive_access=True) as datastore:
        current_working_dir = os.path.join(working_dir, get_random_id())
        res_file = os.path.join(current_working_dir, "results.json")
        try:
            os.makedirs(current_working_dir)
        except Exception:
            pass

        with open(path, 'rb') as original_file:
            if is_cart(original_file.read(256)):
                original_file.seek(0)

                extracted_fd, extracted_path = tempfile.mkstemp()
                extracted_file = os.fdopen(extracted_fd, 'wb')

                try:
                    hdr, _ = unpack_stream(original_file, extracted_file)
                    if hdr.get('al', {}).get('type', 'unknown') != BUNDLE_TYPE:
                        raise BundlingException(
                            f"Not a valid CaRTed bundle, should be of type: {BUNDLE_TYPE}"
                        )
                finally:
                    extracted_file.close()
            else:
                extracted_path = path

        # Extract the bundle
        try:
            subprocess.check_call(
                ["tar", "-zxf", extracted_path, "-C", current_working_dir])
        except subprocess.CalledProcessError:
            raise BundlingException(
                "Bundle decompression failed. Not a valid bundle...")

        with open(res_file, 'rb') as fh:
            data = json.load(fh)

        alert = data.get('alert', None)
        submission = data.get('submission', None)

        try:
            if submission:
                sid = submission['sid']

                # Load results, files and errors
                results = data.get('results', None)
                files = data.get('files', None)
                errors = data.get('errors', None)

                # Check if we have all the service results
                for res_key in submission['results']:
                    if results is None or (res_key
                                           not in results['results'].keys()
                                           and not allow_incomplete):
                        raise IncompleteBundle(
                            "Incomplete results in bundle. Skipping %s..." %
                            sid)

                # Check if we have all files
                for sha256 in list(set([x[:64]
                                        for x in submission['results']])):
                    if files is None or (sha256 not in files['infos'].keys()
                                         and not allow_incomplete):
                        raise IncompleteBundle(
                            "Incomplete files in bundle. Skipping %s..." % sid)

                # Check if we have all the errors
                for err_key in submission['errors']:
                    if errors is None or (err_key
                                          not in errors['errors'].keys()
                                          and not allow_incomplete):
                        raise IncompleteBundle(
                            "Incomplete errors in bundle. Skipping %s..." %
                            sid)

                # Check if the submission does not already exist
                if not datastore.submission.exists(sid):
                    # Make sure bundle's submission meets minimum classification and save the submission
                    submission['classification'] = Classification.max_classification(
                        submission['classification'], min_classification)
                    submission.setdefault('metadata', {})
                    submission['metadata']['bundle.loaded'] = now_as_iso()
                    submission['metadata'].pop('replay', None)
                    submission.update(
                        Classification.get_access_control_parts(
                            submission['classification']))

                    if not rescan_services:
                        # Save the submission in the system
                        datastore.submission.save(sid, submission)

                    # Make sure files meet minimum classification and save the files
                    with forge.get_filestore() as filestore:
                        for f, f_data in files['infos'].items():
                            f_classification = Classification.max_classification(
                                f_data['classification'], min_classification)
                            datastore.save_or_freshen_file(
                                f,
                                f_data,
                                f_data['expiry_ts'],
                                f_classification,
                                cl_engine=Classification)
                            try:
                                filestore.upload(
                                    os.path.join(current_working_dir, f), f)
                            except IOError:
                                pass

                        # Make sure results meet minimum classification and save the results
                        for key, res in results['results'].items():
                            if key.endswith(".e"):
                                datastore.emptyresult.save(
                                    key, {"expiry_ts": res['expiry_ts']})
                            else:
                                res['classification'] = Classification.max_classification(
                                    res['classification'], min_classification)
                                datastore.result.save(key, res)

                        # Make sure errors meet minimum classification and save the errors
                        for ekey, err in errors['errors'].items():
                            datastore.error.save(ekey, err)

                        # Start the rescan
                        if rescan_services and SubmissionClient:
                            extracted_file_infos = {
                                k: {
                                    vk: v[vk]
                                    for vk in [
                                        'magic', 'md5', 'mime', 'sha1',
                                        'sha256', 'size', 'type'
                                    ]
                                }
                                for k, v in files['infos'].items()
                                if k in files['list']
                            }
                            with SubmissionClient(datastore=datastore,
                                                  filestore=filestore,
                                                  config=config,
                                                  identify=identify) as sc:
                                sc.rescan(submission, results['results'],
                                          extracted_file_infos, files['tree'],
                                          list(errors['errors'].keys()),
                                          rescan_services)
                elif not exist_ok:
                    raise SubmissionAlreadyExist(
                        "Submission %s already exists." % sid)

            # Save alert if present and does not exist
            if alert and not datastore.alert.exists(alert['alert_id']):
                alert['classification'] = Classification.max_classification(
                    alert['classification'], min_classification)
                alert.setdefault('metadata', {})
                alert['metadata']['bundle.loaded'] = now_as_iso()

                alert['metadata'].pop('replay', None)
                alert['workflows_completed'] = False

                datastore.alert.save(alert['alert_id'], alert)

            return submission
        finally:
            if extracted_path != path and os.path.exists(extracted_path):
                os.remove(extracted_path)

            if cleanup and os.path.exists(path):
                os.remove(path)

            if os.path.exists(current_working_dir):
                shutil.rmtree(current_working_dir, ignore_errors=True)
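The richer signature above also supports replay-style re-imports; a sketch with placeholder values, using the module-level IDENTIFY helper seen in the API examples earlier:

submission = import_bundle('/tmp/demo.bundle',           # placeholder path
                           rescan_services=['Extract'],  # hypothetical service name
                           exist_ok=True,      # don't fail if the submission exists
                           cleanup=False,      # keep the source bundle on disk
                           identify=IDENTIFY)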
Example #14
def start_ui_submission(ui_sid, **kwargs):
    """
    Start UI submission.

    Starts processing after files were uploaded to the server.

    Variables:
    ui_sid     => UUID for the current UI file upload

    Arguments:
    None

    Data Block (REQUIRED):
    Dictionary of UI specific user settings

    Result example:
    {
     'started': True,                    # Has the submission started processing?
     'sid' : "c7668cfa-...-c4132285142e" # Submission ID
    }
    """
    user = kwargs['user']

    ui_params = request.json
    ui_params['groups'] = kwargs['user']['groups']
    ui_params['quota_item'] = True
    ui_params['submitter'] = user['uname']

    if not Classification.is_accessible(user['classification'], ui_params['classification']):
        return make_api_response({"started": False, "sid": None}, "You cannot start a scan with higher "
                                                                  "classification then you're allowed to see", 403)

    quota_error = check_submission_quota(user)
    if quota_error:
        return make_api_response("", quota_error, 503)

    submit_result = None
    request_files = []
    request_dirs = []
    fnames = []
    try:
        flist = glob.glob(TEMP_DIR + ui_sid + "*")
        if len(flist) > 0:
            # Generate file list
            for fpath in flist:
                request_dirs.append(fpath)
                files = os.listdir(fpath)
                for myfile in files:
                    request_files.append(os.path.join(fpath, myfile))
                    if myfile not in fnames:
                        fnames.append(myfile)

            with open(request_files[0], 'rb') as fh:
                if is_cart(fh.read(256)):
                    meta = get_metadata_only(request_files[0])
                    if meta.get('al', {}).get('type', 'unknown') == 'archive/bundle/al':
                        try:
                            submission = import_bundle(request_files[0])
                        except Exception as e:
                            return make_api_response("", err=str(e), status_code=400)
                        return make_api_response({"started": True, "sid": submission['sid']})

            if not ui_params['description']:
                ui_params['description'] = "Inspection of file%s: %s" % ({True: "s", False: ""}[len(fnames) > 1],
                                                                         ", ".join(fnames))

            # Submit to dispatcher
            try:
                submission_obj = Submission({
                    "files": [],
                    "params": ui_to_submission_params(ui_params)
                })
            except (ValueError, KeyError) as e:
                return make_api_response("", err=str(e), status_code=400)

            with forge.get_filestore() as f_transport:
                try:
                    submit_result = SubmissionClient(datastore=STORAGE, filestore=f_transport, config=config)\
                        .submit(submission_obj, local_files=request_files, cleanup=False)
                    submission_received(submission_obj)
                except SubmissionException as e:
                    return make_api_response("", err=str(e), status_code=400)

            return make_api_response({"started": True, "sid": submit_result.sid})
        else:
            return make_api_response({"started": False, "sid": None}, "No files where found for ID %s. "
                                                                      "Try again..." % ui_sid, 404)
    finally:
        if submit_result is None:
            decrement_submission_quota(user)

        # Remove files
        for myfile in request_files:
            try:
                os.unlink(myfile)
            except Exception:
                pass

        # Remove dirs
        for fpath in request_dirs:
            try:
                os.rmdir(fpath)
            except Exception:
                pass