Example 1
# Binary, Match, and make_start_scout come from the surrounding project's modules.
from typing import Iterable

def find_starts(target: Binary, start_cut: int = 8) -> Iterable[Match]:
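    """Scan the regions of target that have no matches yet for likely
    function starts and record them as new Match objects.

    Args:
        target: the binary to scan.
        start_cut: expected size in bytes of a function-start pattern (see
            the "fnc_start_size" setting).

    Returns:
        The list of newly created matches.
    """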
    match_finder = make_start_scout()
    itree = target.partition_without_matches()
    matches = []
    for interval in itree:
        data = target.read()[interval.begin:interval.end]
        for addr, matcher in match_finder(data, interval.begin):
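            # Deduplicate so repeated hits on the same address reuse a single
            # Match record; matchers that agree accumulate on matched_by.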
            match, new = Match.deduplicate(target, addr, matcher.cut_size,
                                           dict(certainty=1))
            match.matched_by.add(matcher)
            if new:
                target.matches.add(match)
                matches.append(match)

    return matches
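A minimal usage sketch; obtaining a Binary is project-specific, so the constructor call below is an assumption rather than part of the original listing:

# Hypothetical driver: the Binary constructor shown here is assumed, not
# taken from the original code.
binary = Binary(path="samples/firmware.bin")
new_starts = find_starts(binary, start_cut=8)
print(f"recorded {len(new_starts)} new function-start matches")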
Example 2
# Binary, Match, SettingsStorage, makeGraph, orm, find_starts, and the
# graph-match helpers come from the surrounding project's modules.
from typing import Iterable

def match_matchers_against(target: Binary,
                           graph=None,
                           parallelize=True) -> Iterable[Match]:
    """match_candidates_against matches all candidates against the given
    Binary.

	Args:
        target: the target binary to match the candidates against
    """

    settings = SettingsStorage.get_settings()
    parallelize = parallelize or settings.get("matcher_parallelization", False)
    if graph is None:
        graph = makeGraph()
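    # Read the raw bytes once and partition the binary so each region can be
    # matched against the graph independently.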
    bin_ = target.read()
    partitions = target.partition()
    if parallelize:
        matches = parallel_prepartioned_graph_match(graph, bin_, partitions)
    else:
        matches = prepartioned_graph_match(graph, bin_, partitions)
    match_results = []
    for matchers, size, end in matches:
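        # Each result reports the matched size and end offset; recover the
        # start address and split the certainty evenly across all matchers
        # that produced this hit.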
        start = end - size
        match, new = Match.deduplicate(target, start, size,
                                       dict(certainty=1 / len(matchers)))
        for matcher in matchers:
            match.matched_by.add(matcher)
        if new:
            target.matches.add(match)
            match_results.append(match)
    orm.commit()

    if settings.get("find_fnc_starts", False):
        starts = find_starts(target,
                             start_cut=settings.get("fnc_start_size", 8))
        match_results.extend(starts)
        orm.commit()
    return match_results
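As above, a hedged usage sketch; the Binary constructor is an assumed, project-specific API, and only the call signature of match_matchers_against comes from the listing itself:

# Hypothetical driver: constructing the Binary is project-specific; the
# matcher graph is built internally when none is passed.
binary = Binary(path="samples/firmware.bin")
results = match_matchers_against(binary, parallelize=True)
print(f"{len(results)} matches recorded for the target binary")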