Example 1
def by(**type):
    '''Return the segment matching the specified keywords in `type`.'''
    searchstring = ', '.join("{:s}={!r}".format(key, value)
                             for key, value in six.iteritems(type))

    res = builtins.list(__iterate__(**type))
    if len(res) > 1:
        maxaddr = max(builtins.map(operator.attrgetter('endEA'), res) or [1])
        caddr = math.ceil(math.log(maxaddr) / math.log(16))
        builtins.map(
            logging.info,
            (("[{:d}] {:0{:d}x}:{:0{:d}x} {:s} {:+#x} sel:{:04x} flags:{:02x}".
              format(seg.index, seg.startEA, int(caddr), seg.endEA, int(caddr),
                     idaapi.get_true_segm_name(seg), seg.size(), seg.sel,
                     seg.flags)) for seg in res))
        logging.warn(
            "{:s}.by({:s}) : Found {:d} matching results. Returning the first segment at index {:d} from {:0{:d}x}<>{:0{:d}x} with the name {:s} and size {:+#x}."
            .format(__name__, searchstring, len(res), res[0].index,
                    res[0].startEA, int(caddr), res[0].endEA, int(caddr),
                    idaapi.get_true_segm_name(res[0]), res[0].size()))

    res = next(iter(res), None)
    if res is None:
        raise E.SearchResultsError(
            "{:s}.by({:s}) : Found 0 matching results.".format(
                __name__, searchstring))
    return res
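
A note on the pattern above: `builtins.map(logging.info, ...)` and `max(builtins.map(...) or [1])` only behave as intended when `map` is eager and returns a list, as in Python 2. With Python 3's lazy `map`, the `logging.info` calls never run unless the iterator is consumed, and a map object is always truthy, so the `or [1]` fallback never triggers. A minimal standalone sketch of the difference, using plain integers instead of IDA segments:

import logging
logging.basicConfig(level=logging.INFO)

items = []                                  # pretend no segments matched
print(bool(map(hex, items)))                # True on Python 3: a map object is always truthy
maxaddr = max([ea for ea in items] or [1])  # eager equivalent of the 'or [1]' fallback
print(maxaddr)                              # 1

addresses = [0x401000, 0x402000]
map(logging.info, ("[{:d}] {:#x}".format(i, ea) for i, ea in enumerate(addresses)))  # logs nothing on Python 3
for i, ea in enumerate(addresses):          # eager version: logs both lines
    logging.info("[{:d}] {:#x}".format(i, ea))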
Example 2
def list(**type):
    '''List all of the enumerations within the database that match the keyword specified by `type`.'''
    res = builtins.list(iterate(**type))

    maxindex = max(builtins.map(idaapi.get_enum_idx, res) or [1])
    maxname = max(
        builtins.map(utils.fcompose(idaapi.get_enum_name, len), res) or [0])
    maxsize = max(builtins.map(size, res) or [0])
    cindex = math.ceil(math.log(maxindex or 1) / math.log(10))
    try:
        cmask = max(
            builtins.map(
                utils.fcompose(
                    mask,
                    utils.fcondition(utils.fpartial(operator.eq, 0))(
                        utils.fconstant(1), utils.fidentity), math.log,
                    functools.partial(operator.mul, 1.0 / math.log(8)),
                    math.ceil), res) or [database.config.bits() / 4.0])
    except:
        cmask = 0

    for n in res:
        six.print_(
            "[{:{:d}d}] {:>{:d}s} & {:<{:d}x} ({:d} members){:s}".format(
                idaapi.get_enum_idx(n), int(cindex), idaapi.get_enum_name(n),
                maxname, mask(n), int(cmask), len(builtins.list(members(n))),
                " // {:s}".format(comment(n)) if comment(n) else ''))
    return
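
The `cindex` and `cmask` values above are column widths: `math.ceil(math.log(n) / math.log(base))` approximates how many digits `n` needs in the given base, and the result is only used to pad the printed columns. A standalone sketch of the same idea with illustrative values (not taken from the module); note that for exact powers of the base the estimate comes up one digit short, which is tolerable when it only affects alignment:

import math

def digits(n, base=10):
    """Rough digit count of n in the given base, as used for column padding."""
    return int(math.ceil(math.log(n or 1) / math.log(base)))

indices = [3, 42, 999]
cindex = max(digits(n) for n in indices)    # 3
for i, n in enumerate(indices):
    print("[{:{:d}d}] {:d}".format(i, cindex, n))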
Example 3
def by(**type):
    '''Return the segment matching the specified keywords in `type`.'''
    searchstring = utils.string.kwargs(type)
    get_segment_name = idaapi.get_segm_name if hasattr(
        idaapi, 'get_segm_name') else idaapi.get_true_segm_name

    listable = builtins.list(__iterate__(**type))
    if len(listable) > 1:
        maxaddr = max(builtins.map(interface.range.end, listable) or [1])
        caddr = math.ceil(math.log(maxaddr) / math.log(16))
        builtins.map(
            logging.info,
            ((u"[{:d}] {:0{:d}x}:{:0{:d}x} {:s} {:+#x} sel:{:04x} flags:{:02x}"
              .format(seg.index, interface.range.start(seg), int(caddr),
                      interface.range.end(seg), int(caddr),
                      utils.string.of(get_segment_name(seg)), seg.size(),
                      seg.sel, seg.flags)) for seg in listable))
        logging.warn(
            u"{:s}.by({:s}) : Found {:d} matching results. Returning the first segment at index {:d} from {:0{:d}x}<>{:0{:d}x} with the name {:s} and size {:+#x}."
            .format(__name__, searchstring, len(listable), listable[0].index,
                    interface.range.start(listable[0]), int(caddr),
                    interface.range.end(listable[0]), int(caddr),
                    utils.string.of(get_segment_name(listable[0])),
                    listable[0].size()))

    res = six.next(iter(listable), None)
    if res is None:
        raise E.SearchResultsError(
            u"{:s}.by({:s}) : Found 0 matching results.".format(
                __name__, searchstring))
    return res
Example 4
def list(**type):
    '''List all of the segments in the database that match the keyword specified by `type`.'''
    res = builtins.list(__iterate__(**type))

    maxindex = max(builtins.map(operator.attrgetter('index'), res) or [1])
    maxaddr = max(builtins.map(operator.attrgetter('endEA'), res) or [1])
    maxsize = max(builtins.map(operator.methodcaller('size'), res) or [1])
    maxname = max(
        builtins.map(utils.fcompose(idaapi.get_true_segm_name, len), res)
        or [1])
    cindex = math.ceil(math.log(maxindex or 1) / math.log(10))
    caddr = math.ceil(math.log(maxaddr or 1) / math.log(16))
    csize = math.ceil(math.log(maxsize or 1) / math.log(16))

    for seg in res:
        comment = idaapi.get_segment_cmt(seg, 0) or idaapi.get_segment_cmt(
            seg, 1)
        six.print_(
            u"[{:{:d}d}] {:#0{:d}x}<>{:#0{:d}x} : {:<+#{:d}x} : {:>{:d}s} : sel:{:04x} flags:{:02x}{:s}"
            .format(
                seg.index, int(cindex), seg.startEA, 2 + int(caddr), seg.endEA,
                2 + int(caddr), seg.size(), 3 + int(csize),
                utils.string.of(idaapi.get_true_segm_name(seg)),
                maxname, seg.sel, seg.flags, u"// {:s}".format(
                    utils.string.of(comment)) if comment else ''))
    return
Example 5
def list(cls, enum):
    '''List all the members belonging to the enumeration identified by `enum`.'''
    # FIXME: make this consistent with every other .list using the matcher class
    eid = by(enum)
    res = builtins.list(cls.iterate(eid))
    maxindex = max(builtins.map(utils.first, enumerate(res)) or [1])
    maxvalue = max(builtins.map(utils.fcompose(member.value, "{:#x}".format, len), res) or [1])
    for i, mid in enumerate(res):
        six.print_(u"[{:d}] 0x{:>0{:d}x} {:s}".format(i, member.value(mid), maxvalue, member.name(mid)))
    return
Example 6
def recovermarks():
    """Walk through the tags made by ``colormarks`` and re-create the marks that were found.

    This is useful if any marks were accidentally deleted and can be used for
    recovering them as long as they were initially tagged properly.
    """
    # collect
    result = []
    for fn, l in database.select('marks'):
        m = set((l['marks']) if hasattr(l['marks'], '__iter__') else
                [int(x, 16) for x in l['marks'].
                 split(',')] if type(l['marks']) is str else [l['marks']])
        res = [(ea, d['mark']) for ea, d in func.select(fn, 'mark')]
        if m != {a for a, _ in res}:
            logging.warning(
                "{:s} : Ignoring the function tag \"{:s}\" for function {:#x} due to its value being out-of-sync with the contents values ({!s} <> {!s})."
                .format('.'.join((__name__, 'recovermarks')), 'marks', fn,
                        builtins.map(hex, m),
                        builtins.map(hex, set(a for a, _ in res))))
        result.extend(res)
    result.sort(cmp=lambda x, y: cmp(x[1], y[1]))

    # discovered marks versus database marks
    result = dict(result)
    current = {ea: descr for ea, descr in database.marks()}

    # create tags
    for x, y in result.items():
        if x in current:
            logging.warning(
                "{:#x}: skipping already existing mark : {!r}".format(
                    x, current[x]))
            continue

        # x not in current
        if x not in current:
            logging.info("{:#x}: adding missing mark due to tag : {!r}".format(
                x, result[x]))
        elif current[x] != result[x]:
            logging.info(
                "{:#x}: database tag is different than mark description : {!r}"
                .format(x, result[x]))
        else:
            assert current[x] == result[x]
        database.mark(x, y)

    # marks that aren't reachable in the database
    for ea in set(current.viewkeys()).difference(result.viewkeys()):
        logging.warning("{:#x}: unreachable mark (global) : {!r}".format(
            ea, current[ea]))

    # color them
    colormarks()
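
This version leans on two Python 2-only constructs: `list.sort(cmp=...)` and `dict.viewkeys()`. For reference, the Python 3 equivalents of those two steps would look roughly like this (a sketch with made-up addresses and descriptions, not the module's code):

import operator

result = [(0x401000, 'entry point'), (0x402000, 'decoder loop')]
current = {0x402000: 'decoder loop', 0x403000: 'stale mark'}

result.sort(key=operator.itemgetter(1))     # replaces result.sort(cmp=lambda x, y: cmp(x[1], y[1]))
result = dict(result)

# replaces set(current.viewkeys()).difference(result.viewkeys())
for ea in set(current.keys()).difference(result.keys()):
    print("{:#x}: unreachable mark (global) : {!r}".format(ea, current[ea]))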
Example 7
def list(**type):
    '''List all of the enumerations within the database that match the keyword specified by `type`.'''
    res = builtins.list(iterate(**type))

    maxindex = max(builtins.map(idaapi.get_enum_idx, res) or [1])
    maxname = max(builtins.map(utils.fcompose(idaapi.get_enum_name, len), res) or [0])
    maxsize = max(builtins.map(size, res) or [0])
    cindex = math.ceil(math.log(maxindex or 1)/math.log(10))
    try: cmask = max(builtins.map(utils.fcompose(mask, utils.fcondition(utils.fpartial(operator.eq, 0))(utils.fconstant(1), utils.fidentity), math.log, functools.partial(operator.mul, 1.0/math.log(8)), math.ceil), res) or [database.config.bits()/4.0])
    except: cmask = 0

    for n in res:
        name = idaapi.get_enum_name(n)
        six.print_(u"[{:{:d}d}] {:>{:d}s} & {:<{:d}x} ({:d} members){:s}".format(idaapi.get_enum_idx(n), int(cindex), utils.string.of(name), maxname, mask(n), int(cmask), len(builtins.list(members(n))), u" // {:s}".format(comment(n)) if comment(n) else ''))
    return
Example 8
def by(**type):
    '''Return the segment matching the specified keywords in `type`.'''
    searchstring = utils.string.kwargs(type)

    res = builtins.list(__iterate__(**type))
    if len(res) > 1:
        maxaddr = max(builtins.map(operator.attrgetter('endEA'), res) or [1])
        caddr = math.ceil(math.log(maxaddr)/math.log(16))
        builtins.map(logging.info, ((u"[{:d}] {:0{:d}x}:{:0{:d}x} {:s} {:+#x} sel:{:04x} flags:{:02x}".format(seg.index, seg.startEA, int(caddr), seg.endEA, int(caddr), utils.string.of(idaapi.get_true_segm_name(seg)), seg.size(), seg.sel, seg.flags)) for seg in res))
        logging.warn(u"{:s}.by({:s}) : Found {:d} matching results. Returning the first segment at index {:d} from {:0{:d}x}<>{:0{:d}x} with the name {:s} and size {:+#x}.".format(__name__, searchstring, len(res), res[0].index, res[0].startEA, int(caddr), res[0].endEA, int(caddr), utils.string.of(idaapi.get_true_segm_name(res[0])), res[0].size()))

    res = next(iter(res), None)
    if res is None:
        raise E.SearchResultsError(u"{:s}.by({:s}) : Found 0 matching results.".format(__name__, searchstring))
    return res
Example 9
def list(**type):
    '''List all of the segments in the database that match the keyword specified by `type`.'''
    res = builtins.list(__iterate__(**type))

    maxindex = max(builtins.map(operator.attrgetter('index'), res) or [1])
    maxaddr = max(builtins.map(operator.attrgetter('endEA'), res) or [1])
    maxsize = max(builtins.map(operator.methodcaller('size'), res) or [1])
    maxname = max(builtins.map(utils.fcompose(idaapi.get_true_segm_name,len), res) or [1])
    cindex = math.ceil(math.log(maxindex or 1)/math.log(10))
    caddr = math.ceil(math.log(maxaddr or 1)/math.log(16))
    csize = math.ceil(math.log(maxsize or 1)/math.log(16))

    for seg in res:
        comment = idaapi.get_segment_cmt(seg, 0) or idaapi.get_segment_cmt(seg, 1)
        six.print_(u"[{:{:d}d}] {:#0{:d}x}<>{:#0{:d}x} : {:<+#{:d}x} : {:>{:d}s} : sel:{:04x} flags:{:02x}{:s}".format(seg.index, int(cindex), seg.startEA, 2+int(caddr), seg.endEA, 2+int(caddr), seg.size(), 3+int(csize), utils.string.of(idaapi.get_true_segm_name(seg)), maxname, seg.sel, seg.flags, u"// {:s}".format(utils.string.of(comment)) if comment else ''))
    return
Example 10
def _collect(addr, result):
    process = set()
    for blk in builtins.map(func.block, func.block.after(addr)):
        if any(blk in coll for coll in (result, sentinel)):
            continue
        process.add(blk)
    for addr, _ in process:
        result |= _collect(addr, result | process)
    return result
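
The recursive `_collect` gathers every basic block reachable from `addr`, skipping anything already in `result` or in the enclosing `sentinel` set (a free variable from the surrounding scope, not shown here). The same closure can be computed iteratively with a worklist; a generic standalone sketch over a plain successor map (illustrative data, not the plugin's API):

def collect(start, successors, sentinel=frozenset()):
    """Worklist version: gather everything reachable from start, skipping sentinel nodes."""
    result, process = set(), [start]
    while process:
        node = process.pop()
        for succ in successors.get(node, ()):
            if succ in result or succ in sentinel:
                continue
            result.add(succ)
            process.append(succ)
    return result

graph = {'a': ['b', 'c'], 'b': ['d'], 'c': ['d'], 'd': []}
print(sorted(collect('a', graph, sentinel={'c'})))   # ['b', 'd']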
Example 11
def groupby(sequence, count):
    '''Group `sequence` into chunks of `count` elements.'''
    idata = enumerate(sequence)

    def fkey(item):
        (index, value) = item
        return index // count

    for key, res in itertools.groupby(idata, fkey):
        yield builtins.map(operator.itemgetter(1), res)
    return
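
A quick usage sketch for `groupby`, restated compactly so the snippet runs on its own. On Python 3 each yielded group is a lazy `map` object backed by the shared `itertools.groupby` iterator, so consume each group before advancing to the next:

import itertools, operator

def groupby(sequence, count):
    '''Group `sequence` into chunks of `count` elements.'''
    for key, res in itertools.groupby(enumerate(sequence), lambda item: item[0] // count):
        yield map(operator.itemgetter(1), res)

for group in groupby(range(10), 3):
    print(list(group))   # [0, 1, 2] / [3, 4, 5] / [6, 7, 8] / [9]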
Example 12
def __iterate__(**type):
    '''Iterate through each segment defined in the database that matches the keywords specified by `type`.'''
    if not type: type = {'predicate':lambda n: True}
    def newsegment(index):
        res = idaapi.getnseg(index)
        res.index = index
        return res
    res = builtins.map(newsegment, six.moves.range(idaapi.get_segm_qty()))
    for key, value in six.iteritems(type):
        res = builtins.list(__matcher__.match(key, value, res))
    for item in res: yield item
Example 13
def parallelMap(f, items, nProcs=None):
    if nProcs == 1:
        return builtins.map(f, items)
    else:
        name = parallelMap.i
        parallelMap.scope[name] = (f, items)
        parallelMap.i += 1
        pool = multiprocessing.Pool(nProcs)
        result = pool.map(_parallel_func,
                          ((name, i) for i in range(len(items))))
        pool.close()
        del parallelMap.scope[name]
        return result
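
`_parallel_func` and the `parallelMap.scope` / `parallelMap.i` attributes are not shown in this excerpt. Judging from the call sites, the registry lets each pool worker look the job up by name and apply `f` to a single item, which only works when the children inherit the module state (a fork start method). One plausible reconstruction of the missing pieces, assuming the `parallelMap` above is in scope; this is a guess, not the project's actual code:

# Assumed one-time initialization of the registry used by parallelMap above.
parallelMap.scope = {}
parallelMap.i = 0

def _parallel_func(args):
    """Worker side (assumed): unpack the (name, index) pair and apply f to the indexed item."""
    name, i = args
    f, items = parallelMap.scope[name]
    return f(items[i])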
Example 14
def __iterate__(**type):
    '''Iterate through each segment defined in the database that matches the keywords specified by `type`.'''
    if not type: type = {'predicate': lambda n: True}

    def newsegment(index):
        res = idaapi.getnseg(index)
        res.index = index
        return res

    res = builtins.map(newsegment, six.moves.range(idaapi.get_segm_qty()))
    for key, value in six.iteritems(type):
        res = builtins.list(__matcher__.match(key, value, res))
    for item in res:
        yield item
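
`__matcher__` is defined elsewhere in the module; the pattern here is simply successive filtering, where each keyword in `type` narrows the candidate list. A generic standalone sketch of the same idea over plain objects (the `name` and `predicate` keywords and the `Matcher`/`Segment` classes are illustrative, not the module's internals):

class Matcher(object):
    """Map keyword names to filter functions and apply them one keyword at a time."""
    def __init__(self):
        self._filters = {}
    def boolean(self, key, func):
        self._filters[key] = func
    def match(self, key, value, iterable):
        func = self._filters[key]
        return (item for item in iterable if func(value, item))

class Segment(object):
    def __init__(self, name, size):
        self.name, self.size = name, size

matcher = Matcher()
matcher.boolean('name', lambda value, seg: seg.name == value)
matcher.boolean('predicate', lambda value, seg: value(seg))

def iterate(**type):
    if not type:
        type = {'predicate': lambda seg: True}
    res = [Segment('.text', 0x1000), Segment('.data', 0x200)]
    for key, value in type.items():
        res = list(matcher.match(key, value, res))
    for item in res:
        yield item

print([seg.name for seg in iterate(name='.text')])   # ['.text']
print([seg.name for seg in iterate()])               # ['.text', '.data']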
Example 15
def ppmap(processes, function, sequence, *sequences):
    """Split the work of 'function' across the given number of
    processes.  Set 'processes' to None to let Parallel Python
    autodetect the number of children to use.

    Although the calling semantics should be identical to
    __builtin__.map (even using __builtin__.map to process
    arguments), it differs in that it returns a generator instead of a
    list.  This enables lazy evaluation of the results so that other
    work can be done while the subprocesses are still running.

    >>> def rangetotal(n): return n, sum(range(n))
    >>> list(map(rangetotal, range(1, 6)))
    [(1, 0), (2, 1), (3, 3), (4, 6), (5, 10)]
    >>> list(ppmap(1, rangetotal, range(1, 6)))
    [(1, 0), (2, 1), (3, 3), (4, 6), (5, 10)]
    """

    # Create a new server if one isn't already initialized
    if not __STATE["server"]:
        __STATE["server"] = pp.Server()

    def submit(*args):
        """Send a job to the server"""
        return __STATE["server"].submit(function, args, globals=globals())

    # Merge all the passed-in argument lists together.  This is done
    # this way because, as with the map() function, at least one list
    # is required but the rest are optional.
    a = [sequence]
    a.extend(sequences)

    # Set the requested level of multi-processing
    __STATE["server"].set_ncpus(processes or "autodetect")

    # First, submit all the jobs.  Then harvest the results as they
    # become available.
    return (subproc() for subproc in builtins.map(submit, *a))
Example 16
def groupby(sequence, count):
    '''Group `sequence` into chunks of `count` elements.'''
    key, data = lambda (index, value): index / count, enumerate(sequence)
    for key, res in itertools.groupby(data, key):
        yield builtins.map(operator.itemgetter(1), res)
    return
Example 17
    'fitem', 'fhasattr', 'fattributeQ', 'fgetattr', 'fattribute', 'fconstant',
    'fpassthru', 'fdefault', 'fpass', 'fidentity', 'fid', 'first', 'second',
    'third', 'last', 'fcompose', 'fdiscard', 'fcondition', 'fmap', 'flazy',
    'fmemo', 'fpartial', 'fapply', 'fcurry', 'frpartial', 'freverse',
    'freversed', 'fexc', 'fexception', 'fcatch', 'fcomplement', 'fnot',
    'ilist', 'liter', 'ituple', 'titer', 'itake', 'iget', 'imap', 'ifilter',
    'ichain', 'izip', 'count'
]

### functional programming primitives (FIXME: probably better to document these with examples)

# box any specified arguments
fbox = fboxed = lambda *a: a
# return a closure that executes `f` with the arguments unboxed.
funbox = lambda f, *a, **k: lambda *ap, **kp: f(
    *(a + builtins.reduce(operator.add, builtins.map(builtins.tuple, ap),
                          ())), **builtins.dict(k.items() + kp.items()))
# return a closure that will check that `object` is an instance of `type`.
finstance = lambda *type: frpartial(builtins.isinstance, type)
# return a closure that will check if its argument has an item `key`.
fhasitem = fitemQ = lambda key: fcompose(
    fcatch(frpartial(operator.getitem, key)), iter, next,
    fpartial(operator.eq, None))
# return a closure that will get a particular element from an object
fgetitem = fitem = lambda item, *default: lambda object: default[
    0] if default and item not in object else object[item]
# return a closure that will check if its argument has an `attribute`.
fhasattr = fattributeQ = lambda attribute: frpartial(hasattr, attribute)
# return a closure that will get a particular attribute from an object
fgetattr = fattribute = lambda attribute, *default: lambda object: getattr(
    object, attribute, *default)
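
Per the FIXME above, here are usage sketches for the combinators whose definitions are fully visible in this excerpt (`fbox`, `fgetitem`, `fgetattr`); the rest depend on helpers such as `frpartial` and `fcompose` that are defined elsewhere in the module. The definitions are restated so the snippet runs on its own:

# Restated from the excerpt above so this sketch is self-contained.
fbox = fboxed = lambda *a: a
fgetitem = fitem = lambda item, *default: lambda object: default[0] if default and item not in object else object[item]
fgetattr = fattribute = lambda attribute, *default: lambda object: getattr(object, attribute, *default)

print(fbox(1, 2, 3))                        # (1, 2, 3)

record = {'name': '.text', 'size': 0x1000}
get_name = fgetitem('name')
get_flags = fgetitem('flags', 0)            # falls back to 0 when the key is missing
print(get_name(record), get_flags(record))  # .text 0

get_real = fgetattr('real')
print(get_real(complex(2, 3)))              # 2.0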
Example 18
def map(function, *sequence):
    """ map(function, sequence[, sequence, ...]) -> list

    Like the builtin map() function, but splits the workload across a
    pool of processes whenever possible.
    """

    # IPC stuff
    structformat = 'L'  # was 'H', but not every message length fits in an unsigned short.
    structlen = struct.calcsize(structformat)

    def sendmessage(myend, message):
        """Send a pickled message across a pipe"""
        outobj = pickle.dumps(message)
        os.write(myend, struct.pack(structformat, len(outobj)) + outobj)

    def recvmessage(myend):
        """Receive a pickled message from a pipe"""
        length = struct.unpack(structformat, (os.read(myend, structlen)))[0]
        return pickle.loads(os.read(myend, length))

    try:
        maxchildren = function.parallel_maxchildren
    except AttributeError:
        return builtins.map(function, *sequence)

    # Handle map()'s multi-sequence semantics
    if len(sequence) == 1:
        if function is None:
            return list(sequence[0])
        arglist = zip(sequence[0])
    else:
        arglist = builtins.map(None, *sequence)
    if function is None:
        return arglist

    argindex = 0
    finished = 0
    outlist = [None] * len(arglist)

    # Spawn the worker children.  Don't create more than the number of
    # values we'll be processing.
    fromchild, toparent = os.pipe()
    children = []
    for childnum in range(min(maxchildren, len(arglist))):
        fromparent, tochild = os.pipe()
        pid = os.fork()
        # Parent?
        if pid:
            # Do some housekeeping and give the child its first assignment
            children.append({
                'pid': pid,
                'fromparent': fromparent,
                'tochild': tochild,
            })
            sendmessage(tochild, (argindex, arglist[argindex]))
            argindex += 1
        # Child?
        else:
            # Since children can't really tell when they've been
            # orphaned, set a timeout so that they die if they don't
            # hear from the parent in a timely manner.
            def timeouthandler(signum, frame):
                """Get out cleanly"""
                sys.exit()
            oldsignal = signal.signal(signal.SIGALRM, timeouthandler)

            # Keep processing values until the parent kills you
            while True:
                try:
                    # Wait one second before quitting.  Children
                    # should generally hear from their parent almost
                    # instantly.
                    signal.alarm(1)
                    message = recvmessage(fromparent)
                    signal.alarm(0)
                    if message is None:
                        sys.exit()
                    index, value = message
                    sendmessage(toparent, (childnum, index, function(*value)))
                except Exception as excvalue:
                    try:
                        excvalue.tb = traceback.extract_tb(sys.exc_info()[2])
                    except Exception as e2:
                        try:
                            excvalue.e2 = e2
                        except Exception as e3:
                            pass
                    sendmessage(toparent, (childnum, index, excvalue))
                finally:
                    signal.signal(signal.SIGALRM, oldsignal)

    # Keep accepting values back from the children until they've all
    # come back
    while finished < len(arglist):
        returnchild, returnindex, value = recvmessage(fromchild)
        if isinstance(value, Exception):
            try:
                print("Child traceback:")
                print(''.join(traceback.format_list(value.tb)))
            except Exception as e2:
                pass
            raise value
        outlist[returnindex] = value
        finished += 1
        # If there are still values left to process, hand one back out
        # to the child that just finished
        if argindex < len(arglist):
            sendmessage(children[returnchild]['tochild'],
                        (argindex, arglist[argindex]))
            argindex += 1

    # Kill the child processes
    for child in children:
        sendmessage(child['tochild'], None)
    for child in children:
        os.wait()

    return outlist
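
The IPC here frames every message as a fixed-size length prefix (`struct.pack('L', ...)`) followed by the pickled payload, and it assumes each `os.read` returns a whole message, which is reasonable for small messages on a local pipe but not guaranteed in general. A standalone sketch of just that framing:

import os
import pickle
import struct

STRUCTFORMAT = 'L'
STRUCTLEN = struct.calcsize(STRUCTFORMAT)

def sendmessage(fd, message):
    """Write a length-prefixed pickle to the pipe."""
    payload = pickle.dumps(message)
    os.write(fd, struct.pack(STRUCTFORMAT, len(payload)) + payload)

def recvmessage(fd):
    """Read the length prefix, then unpickle exactly that many bytes."""
    length = struct.unpack(STRUCTFORMAT, os.read(fd, STRUCTLEN))[0]
    return pickle.loads(os.read(fd, length))

readend, writeend = os.pipe()
sendmessage(writeend, (0, ['some', 'work']))
print(recvmessage(readend))                 # (0, ['some', 'work'])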
Example 19
def recovermarks():
    """Walk through the tags made by ``colormarks`` and re-create the marks that were found.

    This is useful if any marks were accidentally deleted and can be used for
    recovering them as long as they were initially tagged properly.
    """
    # collect
    result = []
    for fn, l in database.select('marks'):
        m = set( (l['marks']) if hasattr(l['marks'], '__iter__') else [int(x, 16) for x in l['marks'].split(',')] if type(l['marks']) is str else [l['marks']])
        res = [(ea, d['mark']) for ea, d in func.select(fn, 'mark')]
        if m != { a for a, _ in res }:
            logging.warning("{:s} : Ignoring the function tag \"{:s}\" for function {:#x} due to its value being out-of-sync with the contents values ({!s} <> {!s}).".format('.'.join((__name__, 'recovermarks')), 'marks', fn, builtins.map(hex, m), builtins.map(hex, set(a for a, _ in res))))
        result.extend(res)
    result.sort(cmp=lambda x, y: cmp(x[1], y[1]))

    # discovered marks versus database marks
    result = dict(result)
    current = {ea : descr for ea, descr in database.marks()}

    # create tags
    for x, y in result.items():
        if x in current:
            logging.warning("{:#x}: skipping already existing mark : {!r}".format(x, current[x]))
            continue

        # x not in current
        if x not in current:
            logging.info("{:#x}: adding missing mark due to tag : {!r}".format(x, result[x]))
        elif current[x] != result[x]:
            logging.info("{:#x}: database tag is different than mark description : {!r}".format(x, result[x]))
        else:
            assert current[x] == result[x]
        database.mark(x, y)

    # marks that aren't reachable in the database
    for ea in set(current.viewkeys()).difference(result.viewkeys()):
        logging.warning("{:#x}: unreachable mark (global) : {!r}".format(ea, current[ea]))

    # color them
    colormarks()