def generate_decoy_match_results(scored_matches_path, decon_data, model_file_path,
                                 prefix_len=0, suffix_len=0, ms1_tolerance=1e-5,
                                 ms2_tolerance=2e-5, num_decoys_per_real_mass=1.0,
                                 method="full_random_forest", random_only=False,
                                 method_init_args=None, method_fit_args=None,
                                 n_processes=6, outfile_path=None):
    """Build a decoy ion search space from the database behind `scored_matches_path`,
    match it against `decon_data`, post-process the matches, and score them with the
    trained model at `model_file_path`.

    Returns the path to the scored decoy matches file.

    Note: `num_decoys_per_real_mass` and `random_only` are accepted but are not
    currently forwarded to the decoy generation step.
    """
    logger.info("Creating Decoys")
    predictions = classify_matches.prepare_model_file(scored_matches_path)
    # Generate the decoy ion search space from the same database that produced
    # the real matches.
    decoy_file_name = make_decoys_from_search_space.taskmain(
        predictions.metadata["db_file_name"],
        prefix_len=prefix_len, suffix_len=suffix_len,
        n_processes=n_processes, out=outfile_path)
    logger.info("Decoy Ion Space: %s", decoy_file_name)
    # Match the deconvoluted spectra against the decoy ion space.
    match_ions2.match_frags(
        decoy_file_name, decon_data,
        ms1_tolerance, ms2_tolerance,
        n_processes=n_processes)
    logger.info("Decoy Matches Done")
    # Post-process the raw decoy matches before classification.
    postprocess_file, _ = postprocess2.main(decoy_file_name)
    logger.info("Decoys Postprocessed: %s", postprocess_file)
    # Score the post-processed decoy matches with the previously trained model.
    classifier = classify_matches.ClassifyTargetWithModelTask(
        model_file_path, postprocess_file,
        method=method,
        method_init_args=method_init_args,
        method_fit_args=method_fit_args)
    decoy_matches_path = classifier.run()
    return decoy_matches_path


def postprocess_matches(matched_ions_file):
    """Post-process a matched ions file and return the path to the output file."""
    path, _ = postprocess2.main(matched_ions_file)
    return path
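

# A minimal usage sketch. All file names below are assumptions for illustration;
# the real inputs (scored matches, deconvoluted spectra, trained model) come from
# earlier steps of the pipeline:
#
#     decoy_matches_path = generate_decoy_match_results(
#         "scored_matches.csv",   # hypothetical scored matches file
#         "decon_data.yaml",      # hypothetical deconvoluted spectra file
#         "model.pkl",            # hypothetical trained classifier model
#         n_processes=4)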