Example #1
def load_file(ctx):
    if not os.path.exists(ctx.filename):
        error("file {ctx.filename} doesn't exist".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    if not os.path.isfile(ctx.filename):
        error("this is not a file".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    dirname = os.path.dirname(ctx.filename)
    db_path = dirname + "/" if dirname != "" else ""
    db_path += "." + os.path.basename(ctx.filename) + ".db"
    db_exists = os.path.exists(db_path)
    ctx.db_path = db_path

    try:
        dis = Disassembler(ctx.filename,
                           ctx.raw_type,
                           ctx.raw_base,
                           ctx.raw_big_endian,
                           load_symbols=not db_exists)
    except ExcArch as e:
        error("arch %s is not supported" % e.arch)
        if ctx.interactive:
            return False
        die()
    except ExcFileFormat:
        error("the file is not PE or ELF binary")
        if ctx.interactive:
            return False
        die()
    except ExcPEFail as e:
        error(str(e.e))
        error("It seems that pefile.parse_data_directories is bugged.")
        error("Maybe you should Retry")
        if ctx.interactive:
            return False
        die()

    # Load symbols in the database
    if db_exists:
        info("open database %s" % db_path)
        fd = open(db_path, "r")
        db = json.loads(fd.read())
        ctx.db = db
        sym = dis.binary.symbols
        rev_sym = dis.binary.reverse_symbols
        for name, addr in db["symbols"].items():
            sym[name] = addr
            rev_sym[addr] = name
        fd.close()

    ctx.dis = dis
    ctx.libarch = dis.load_arch_module()

    return True
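
The database path built above follows a simple convention: the .db file is hidden next to the binary as ".<basename>.db". A minimal sketch of that convention in isolation (the helper name is made up):

import os

def db_path_for(filename):
    # Mirror the naming used above: the database sits next to the binary
    # as a hidden file ".<basename>.db" (a sketch, not the project API).
    dirname = os.path.dirname(filename)
    prefix = dirname + "/" if dirname != "" else ""
    return prefix + "." + os.path.basename(filename) + ".db"

# db_path_for("samples/crackme")  ->  "samples/.crackme.db"
# db_path_for("crackme")          ->  ".crackme.db"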
Example #2
    def load(self, filename):
        gc.disable()

        self.__init_vars()

        dirname = os.path.dirname(filename)
        self.path = dirname + "/" if dirname != "" else ""
        self.path +=  "." + os.path.basename(filename) + ".db"

        if os.path.exists(self.path):
            info("open database %s" % self.path)

            fd = open(self.path, "rb")

            data = self.__check_old_json_db(fd)
            if data is None:
                data = msgpack.unpackb(fd.read(), encoding="utf-8")
                fd.close()

            self.__load_symbols(data)
            self.__load_jmptables(data)
            self.__load_comments(data)
            self.__load_meta(data)
            self.__load_memory(data)
            self.__load_functions(data)

            self.loaded = True

        gc.enable()
Example #3
def create_anno(n_threads=70):
    xml_fps = glob.glob('./data/protein_xmls/*.xml')
    create_anno_list = [{'xml_fp': xml_fp} for xml_fp in xml_fps]
    with Pool(n_threads) as p:
        result_iter = p.imap_unordered(create_anno_for_one_xml,
                                       create_anno_list)
        df_list = []
        for i_result, result in enumerate(result_iter):
            info(
                f"({i_result}/{len(create_anno_list)}) {result['task']['xml_fp']}"
            )
            df_list.append(result['df'])
    df = pd.concat(df_list, ignore_index=True)
    df = df[[
        'img_id',
        'ensembl_id',
        'name',
        'location',
        'verification',
        'cell_line',
        'red',
        'green',
        'blue',
        'yellow',
        'img_url',
    ]]
    df.to_csv('./data/hpa_public_imgs_meta.csv', index=False)
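
create_anno fans the XML files out to a process pool and streams results back with imap_unordered. A minimal sketch of the same pattern with a hypothetical worker (square_one and run_all are illustrative names, not part of the project):

from multiprocessing import Pool

def square_one(task):
    # Hypothetical worker standing in for create_anno_for_one_xml.
    return {'task': task, 'value': task['x'] ** 2}

def run_all(n_workers=4):
    task_list = [{'x': x} for x in range(10)]
    values = []
    with Pool(n_workers) as p:
        # imap_unordered yields results in completion order, not input
        # order, which is why the loop above logs progress per result
        # instead of indexing into the input list.
        for i, result in enumerate(p.imap_unordered(square_one, task_list)):
            print(f"({i}/{len(task_list)}) x={result['task']['x']}")
            values.append(result['value'])
    return values

if __name__ == '__main__':
    run_all()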
Example #4
def load_file(ctx):
    if not os.path.exists(ctx.filename):
        error("file {ctx.filename} doesn't exist".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    if not os.path.isfile(ctx.filename):
        error("this is not a file".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    dirname = os.path.dirname(ctx.filename)
    db_path = dirname + "/" if dirname != "" else ""
    db_path +=  "." + os.path.basename(ctx.filename) + ".db"
    db_exists = os.path.exists(db_path)
    ctx.db_path = db_path

    try:
        dis = Disassembler(ctx.filename, ctx.raw_type,
                           ctx.raw_base, ctx.raw_big_endian,
                           load_symbols=not db_exists)
    except ExcArch as e:
        error("arch %s is not supported" % e.arch)
        if ctx.interactive:
            return False
        die()
    except ExcFileFormat:
        error("the file is not PE or ELF binary")
        if ctx.interactive:
            return False
        die()
    except ExcPEFail as e:
        error(str(e.e))
        error("It seems that pefile.parse_data_directories is bugged.")
        error("Maybe you should Retry")
        if ctx.interactive:
            return False
        die()

    # Load symbols in the database
    if db_exists:
        info("open database %s" % db_path)
        fd = open(db_path, "r")
        db = json.loads(fd.read())
        ctx.db = db
        sym = dis.binary.symbols
        rev_sym = dis.binary.reverse_symbols
        for name, addr in db["symbols"].items():
            sym[name] = addr
            rev_sym[addr] = name
        fd.close()

    ctx.dis = dis
    ctx.libarch = dis.load_arch_module()

    return True
def get_set(path_to_anno, folder, with_trans=False, n_threads=70):
    anno = pd.read_csv(path_to_anno, index_col=0)
    load_list = [{
        'id_': id_,
        'folder': folder,
        'with_trans': with_trans
    } for id_, row in anno.iterrows()]
    with Pool(n_threads) as p:
        result_iter = p.imap(load_one_img, load_list)
        img_list = []
        for i_result, result in enumerate(result_iter):
            info(f"({i_result}/{len(load_list)}) {result['id_']}")
            img_list.append((result['img'], result['id_']))
    # img_list.sort(key=compare_fn)
    return img_list
Example #6
def download_all_images(n_threads=70):
    xml_fps = glob.glob('./data/protein_xmls/*.xml')
    result_rows = []
    for result in multiprocessing(download_one, ({
            'xml_fp': fp
    } for fp in xml_fps),
                                  len_=len(xml_fps)):
        if len(result['errors']) > 0:
            result_rows.append(result)

    error_df = pd.DataFrame.from_records(
        chain(*[row['errors'] for row in result_rows]))
    print(error_df)
    error_df.to_csv('tmp/error_df.csv', index=False)

    info('ALL DONE !!')
def first_round(
    n_threads=70,
    path_to_test_anno='./data/sample_submission.csv',
    path_to_test_imgs='./data/test_full_size_compressed',
    path_to_hpa_anno='./data/hpa_public_imgs.csv',
    path_to_hpa_imgs='./data/hpa_public_imgs',
):
    test_set = get_set(path_to_test_anno,
                       path_to_test_imgs,
                       n_threads=n_threads)
    hpa_trans_set = get_set(path_to_hpa_anno,
                            path_to_hpa_imgs,
                            with_trans=True,
                            n_threads=n_threads)
    # test_set = get_set('./tmp/test1.csv', './tmp/test1/.', n_threads=n_threads)
    # hpa_trans_set = get_set(
    #     './tmp/test1.csv',
    #     './tmp/test2',
    #     with_trans=True,
    #     n_threads=n_threads,
    # )

    identical_pairss = []
    comparison_list = [{
        'test_img': test_img,
        'test_id': test_id,
        'hpa_trans_set': hpa_trans_set
    } for test_img, test_id in test_set]
    with Pool(n_threads) as p:
        result_iter = p.imap(comparison_for_one_test, comparison_list)
        for i_result, result in enumerate(result_iter):
            info(
                f"Finished ({i_result}/{len(comparison_list)}) {result['test_id']}"
            )
            if len(result['identical_pairs']) > 0:
                debug(
                    f"Found {len(result['identical_pairs'])} identical pairs!")
                identical_pairss.append(result['identical_pairs'])
    identical_pairs = [x for l in identical_pairss for x in l]

    info(f'All done! Found {len(identical_pairs)} pairs.')
    save_path = './tmp/identical_pairs_bk.csv'
    out_df = pd.DataFrame.from_records(identical_pairs).sort_values(
        'diff_score')
    out_df.to_csv(save_path, index=False)
    debug(f'Saved results to {save_path}')
Example #8
    def load(self, filename):
        gc.disable()

        self.__init_vars()

        dirname = os.path.dirname(filename)
        self.path = dirname + "/" if dirname != "" else ""
        self.path += "." + os.path.basename(filename) + ".db"

        if os.path.exists(self.path):
            info("open database %s" % self.path)

            fd = open(self.path, "rb")

            data = self.__check_old_json_db(fd)
            if data is None:
                data = fd.read()
                if data.startswith(b"ZLIB"):
                    data = zlib.decompress(data[4:])
                data = msgpack.unpackb(data, encoding="utf-8")
                fd.close()

            self.__load_meta(data)
            self.__load_memory(data)
            self.__load_symbols(data)
            self.__load_jmptables(data)
            self.__load_comments(data)
            self.__load_functions(data)
            self.__load_history(data)
            self.__load_xrefs(data)
            self.__load_imports(data)

            if self.version <= 1.5:
                self.__load_labels(data)

            if self.version != VERSION:
                warning(
                    "the database version is old, some information may be missing"
                )

            self.loaded = True

        gc.enable()
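
The loader above accepts either raw msgpack or msgpack prefixed with the 4-byte marker b"ZLIB" and zlib-compressed. A sketch of what a matching writer could look like, inferred from the load path only (the project's real save code may differ):

import zlib
import msgpack

def save_db(path, data, compress=True):
    # Serialize with msgpack and, optionally, prepend the b"ZLIB" marker
    # that the loader above checks for before decompressing. The marker
    # and layout are assumptions taken from the read side.
    packed = msgpack.packb(data)
    if compress:
        packed = b"ZLIB" + zlib.compress(packed)
    with open(path, "wb") as fd:
        fd.write(packed)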
def show_top_imgs(save_path='./tmp/identical_pairs.csv'):
    path_to_test_imgs = './data/test_full_size_compressed'
    path_to_hpa_imgs = './data/hpa_public_imgs'
    img_list = []
    out_df = pd.read_csv(save_path)
    df = out_df.head(144)
    for i_row, row in df.iterrows():
        info(f"({i_row}/{len(df)}) {row['test_id']}  -  {row['hpa_id']}")
        test_img = load_image(
            row['test_id'],
            config,
            resize=(512, 512),
            folder=path_to_test_imgs,
            extension='jpg',
            channel=None,
        ) / 255.
        hpa_img = load_image(
            row['hpa_id'],
            config,
            resize=(512, 512),
            folder=path_to_hpa_imgs,
            extension='jpg',
            channel=None,
        ) / 255.
        test_img_resized = cv2.resize(test_img, (16, 16))
        # test_img_resized[:, :, 1] = 0
        hpa_img_resized = cv2.resize(hpa_img, (16, 16))
        # hpa_img_resized[:, :, 1] = 0
        diff_img = test_img_resized - hpa_img_resized
        img_list.append(
            (test_img,
             f"{row['test_id']} ({test_img.shape}, avg: {test_img.mean():.3f})"
             ))
        img_list.append((
            hpa_img,
            f"{row['hpa_id']}\n({hpa_img.shape}, avg: {hpa_img.mean():.3f}, {row['trans']}, max_error: {row['max_error']:.3f})"
        ))
        img_list.append((
            diff_img / 2 + 0.5,
            f"test - hpa (max: {diff_img.max():.3f}, min: {diff_img.min():.3f})"
        ))
    display_imgs(img_list, dpi=50, n_cols=27, save_as='./tmp/comparison.jpg')
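
diff_img holds signed differences of images already scaled to [0, 1], so its values lie in [-1, 1]; the diff_img / 2 + 0.5 passed to display_imgs remaps that range back to [0, 1] so zero difference renders as mid-grey. A quick check of the remap:

import numpy as np

# [-1, 1] differences -> [0, 1] for display; identical pixels map to 0.5.
d = np.array([-1.0, -0.2, 0.0, 0.2, 1.0])
print(d / 2 + 0.5)   # [0.  0.4 0.5 0.6 1. ]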
def hires_compare(
    n_threads=70,
    path_to_test_anno='./data/sample_submission.csv',
    path_to_test_imgs='./data/test_full_size_compressed',
    path_to_hpa_anno='./data/hpa_public_imgs.csv',
    path_to_hpa_imgs='./data/hpa_public_imgs',
):
    pairs_anno = pd.read_csv('./tmp/identical_pairs_bk.csv')
    out_rows = []
    task_list = [row.to_dict() for i_row, row in pairs_anno.iterrows()]
    with Pool(n_threads) as p:
        result_iter = p.imap_unordered(hires_compare_one, task_list)
        for i_result, result in enumerate(result_iter):
            info(
                f"({i_result}/{len(task_list)}) {result['test_id']}  -  {result['hpa_id']}"
            )
            out_rows.append(result)
    out_anno = pd.DataFrame.from_records(out_rows)
    out_anno = out_anno.sort_values('relaive_max_error')
    out_anno.to_csv('./tmp/identical_pairs.csv', index=False)
Example #11
    def load(self, filename):
        gc.disable()

        self.__init_vars()

        dirname = os.path.dirname(filename)
        self.path = dirname + "/" if dirname != "" else ""
        self.path +=  "." + os.path.basename(filename) + ".db"

        if os.path.exists(self.path):
            info("open database %s" % self.path)

            fd = open(self.path, "rb")

            data = self.__check_old_json_db(fd)
            if data is None:
                data = fd.read()
                if data.startswith(b"ZLIB"):
                    data = zlib.decompress(data[4:])
                data = msgpack.unpackb(data, encoding="utf-8")
                fd.close()

            self.__load_meta(data)
            self.__load_memory(data)
            self.__load_symbols(data)
            self.__load_jmptables(data)
            self.__load_comments(data)
            self.__load_functions(data)
            self.__load_history(data)
            self.__load_xrefs(data)
            self.__load_imports(data)

            if self.version <= 1.5:
                self.__load_labels(data)

            if self.version != VERSION:
                warning("the database version is old, some information may be missing")

            self.loaded = True

        gc.enable()
Example #12
def parse(var):
    full = "".join(var)
    full = full.replace("\n", "")
    full = re.sub(" +", " ", full)
    full = full.rstrip()
    utils.info(full)
    print len(full)
    major = int(full[-17:-11].replace(" ", ""), 16)
    minor = int(full[-11:-6].replace(" ", ""), 16)
    temp = minor / 10.0
    utils.info("Major: " + str(major))
    utils.info("Minor: " + str(minor))
    utils.send_to_cloud(1, minor, major)  # 1 represents key for temperature in temp * 100 format
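
The two int(..., 16) calls above strip the spaces left over from the serial formatting and read the remaining digits as hexadecimal; minor is then divided by 10 to get tenths. A worked example with made-up digits (the real frame layout is device-specific):

major = int("01 2C".replace(" ", ""), 16)   # "012C" -> 300
minor = int("00 FA".replace(" ", ""), 16)   # "00FA" -> 250
temp = minor / 10.0                         # 25.0, i.e. the value in tenths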
Example #13
import argparse

me = singleton.SingleInstance() # will sys.exit(-1) if another instance is running

parser = argparse.ArgumentParser(description='Gets gps location data from local gpsd server and sends them to remote server.')

parser.add_argument('-s', '--send',    action="store_true", help='Send to server, otherwise just test reading the sensors.')
parser.add_argument('-v', '--verbose', action="store_true", help='Print out more info.')

utils.args = parser.parse_args()

# Listen on port 2947 (gpsd) of localhost
session = gps.gps('localhost', 2947)
session.stream(gps.WATCH_ENABLE | gps.WATCH_NEWSTYLE)

utils.info("Starting gps poller")

while True:
    try:
        utils.info("Starting try...")
        report = session.next()
        utils.info(report)
        if report['class'] == 'TPV':
            utils.info(report)
            if hasattr(report, 'time'):
                error = 0.0
                if hasattr(report, 'epx') and hasattr(report, 'epy'):
                    error = (report.epx + report.epy) / 2.0
                location = str(report.lat) + ',' + str(report.lon) + ',' + str(error)
                utils.send_to_cloud("location", location)
                time.sleep(60) # sleep 60 seconds after reporting location
Example #14
import os
import sys
from lib import GlobalContext, AddrContext
from lib.utils import info, die
from lib.ui.vim import generate_vim_syntax

# Generates the file custom_colors.py at the beginning
import lib.colors

if __name__ == '__main__':
    gctx = GlobalContext()
    gctx.parse_args()

    if gctx.color and lib.colors.VERSION < lib.colors.CURR_VERSION:
        info("There is a new version of custom_colors.py. If you did any")
        info("modifications you can delete it. Otherwise you can copy it")
        info("somewhere, run again your command then merge the file at hand.")
        die()

    if gctx.interactive_mode:
        from lib.ui.console import Console
        i = Console(gctx)

    elif gctx.filename is not None:
        if not gctx.load_file():
            die()

        if gctx.list_sections:
            for s in gctx.dis.binary.iter_sections():
                s.print_header()
Example #15
utils.args = parser.parse_args()


def parse(var):
    full = "".join(var)
    full = full.replace("\n", "")
    full = re.sub(" +", " ", full)
    full = full.rstrip()
    utils.info(full)
    print len(full)
    major = int(full[-17:-11].replace(" ", ""), 16)
    minor = int(full[-11:-6].replace(" ", ""), 16)
    temp = minor / 10.0
    utils.info("Major: " + str(major))
    utils.info("Minor: " + str(minor))
    utils.send_to_cloud(1, minor, major)  # 1 represents key for temperature in temp * 100 format


repeat = 1

while repeat:
    data = [0, 0, 0]
    data[0] = sys.stdin.readline()
    if data[0][0] == ">":
        data[1] = sys.stdin.readline()
        data[2] = sys.stdin.readline()
        if len("".join(data)) == 144:
            utils.info("Parsing data...")
            parse(data)
Example #16
    def __init__(self, ctx):
        self.ctx = ctx
        ctx.vim = False

        self.COMMANDS_ALPHA = [
            "calls",
            "da",
            "db",
            "dd",
            "dw",
            "dq",
            "dump",
            "exit",
            "functions",
            "help",
            "info",
            "jmptable",
            "load",
            "lrawarm",
            "lrawmips",
            "lrawmips64",
            "lrawx86",
            "lrawx64",
            "mips_set_gp",
            "py",
            "save",
            "sections",
            "sym",
            "x",
            "v",
            "display.print_section",
            "display.print_comments",
        ]

        self.COMMANDS = {
            "help":
            Command(0, self.__exec_help, None, ["", "Display this help"]),
            "save":
            Command(0, self.__exec_save, None, [
                "",
                "Save the database (only symbols and history currently).",
            ]),
            "load":
            Command(1, self.__exec_load, self.__complete_load, [
                "filename",
                "Load a new binary file.",
            ]),
            "lrawx86":
            Command(1, self.__exec_lrawx86, self.__complete_load, [
                "filename",
                "Load a x86 raw file.",
            ]),
            "lrawx64":
            Command(1, self.__exec_lrawx64, self.__complete_load, [
                "filename",
                "Load a x64 raw file.",
            ]),
            "lrawarm":
            Command(1, self.__exec_lrawarm, self.__complete_load, [
                "filename",
                "Load a ARM raw file.",
            ]),
            "lrawmips":
            Command(1, self.__exec_lrawmips, self.__complete_load, [
                "filename",
                "Load a MIPS raw file.",
            ]),
            "lrawmips64":
            Command(1, self.__exec_lrawmips64, self.__complete_load, [
                "filename",
                "Load a MIPS64 raw file.",
            ]),
            "x":
            Command(1, self.__exec_x, self.__complete_x, [
                "[SYMBOL|0xXXXX|EP]",
                "Decompile and print on stdout. By default it will be main.",
                "The decompilation is forced, it dosn't check if addresses",
                "are defined as code."
            ]),
            "v":
            Command(1, self.__exec_v, self.__complete_x, [
                "[SYMBOL|0xXXXX|EP]",
                "Visual mode",
                "Shortcuts:",
                "g       top",
                "G       bottom",
                "z       set current line on the middle",
                "q       quit",
                ";       edit inline comment (enter/escape to validate/cancel)",
                "%       goto next bracket",
                "*       highlight current word (ctrl-k to clear)",
                "tab     switch between dump/decompilation",
                "enter   follow address",
                "escape  go back",
                "u       re-enter (for undo)",
            ]),
            "da":
            Command(2, self.__exec_data, self.__complete_x, [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in ascii, it stops when the end of the section is found",
            ]),
            "db":
            Command(2, self.__exec_data, self.__complete_x, [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in bytes, it stops when the end of the section is found",
            ]),
            "dd":
            Command(2, self.__exec_data, self.__complete_x, [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in dwords, it stops when the end of the section is found",
            ]),
            "dw":
            Command(2, self.__exec_data, self.__complete_x, [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in words, it stops when the end of the section is found",
            ]),
            "dq":
            Command(2, self.__exec_data, self.__complete_x, [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in qwords, it stops when the end of the section is found",
            ]),

            # by default it will be ctx.lines
            "dump":
            Command(2, self.__exec_dump, self.__complete_x, [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Disassemble only.",
            ]),
            "set":
            Command(3, None, None, ["", "Set options"]),
            "sym":
            Command(3, self.__exec_sym, self.__complete_x, [
                "[SYMBOL 0xXXXX] [| FILTER]",
                "Print all symbols or set a new symbol.",
                "You can filter symbols by searching the word FILTER.",
                "If FILTER starts with -, the match is inversed."
            ]),
            "calls":
            Command(1, self.__exec_calls, self.__complete_x, [
                "[SECTION_NAME]",
                "Print all calls which are in the given section"
            ]),
            "exit":
            Command(0, self.__exec_exit, None, ["", "Exit"]),
            "sections":
            Command(0, self.__exec_sections, None, [
                "",
                "Print all sections",
            ]),
            "info":
            Command(0, self.__exec_info, None,
                    ["", "Information about the current binary"]),
            "display.print_section":
            Command(0, self.__exec_display_print_section, None,
                    ["", "Print or not section when an address is found"]),
            "display.print_comments":
            Command(0, self.__exec_display_print_comments, None,
                    ["", "Print or not comments"]),
            "jmptable":
            Command(4, self.__exec_jmptable, None, [
                "INST_ADDR TABLE_ADDR NB_ENTRIES SIZE_ENTRY",
                "Create a jump table referenced at TABLE_ADDR and called",
                "from INST_ADDR."
            ]),
            "py":
            Command(0, self.__exec_py, None,
                    ["", "Run an interactive python shell."]),
            "mips_set_gp":
            Command(1, self.__exec_mips_set_gp, None,
                    ["ADDR", "Set the register $gp to a fixed value."]),
            "functions":
            Command(1, self.__exec_functions, None,
                    ["", "Print the functions list."]),
        }

        self.analyzer = Analyzer()
        self.analyzer.start()
        info("analyzer is running in background...")

        rl = ReadLine(self.exec_command, self.complete, self.send_control_c)
        self.rl = rl

        if ctx.filename is not None:
            self.__exec_load(["", ctx.filename])

        if ctx.entry is not None:
            self.__exec_x(["", ctx.entry])

        rl.reload_cursor_line()

        while 1:
            rl.loop()
            if self.ctx.db is None or not self.ctx.db.modified:
                break
            print("the database was modified, run save or exit to force")

        self.analyzer.msg.put("exit")
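
Each entry in COMMANDS is built from the same four pieces: a maximum argument count, an execution callback, an optional completion callback, and the help lines shown by "help". A minimal stand-in with that shape (the field names are assumptions; the real Command class comes from the project):

from collections import namedtuple

# Sketch of the shape used above: arg count, exec callback, completion
# callback, and the list of help-text lines.
Command = namedtuple("Command", ["max_args", "callback_exec",
                                 "callback_complete", "desc"])

nop = Command(0, lambda args: None, None, ["", "Do nothing (example only)"])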
Example #17
import os
import sys
from lib import GlobalContext, AddrContext
from lib.utils import info, die
from lib.ui.vim import generate_vim_syntax

# Generates the file custom_colors.py at the beginning
import lib.colors

if __name__ == '__main__':
    gctx = GlobalContext()
    gctx.parse_args()

    if gctx.color and lib.colors.VERSION < lib.colors.CURR_VERSION:
        info("There is a new version of custom_colors.py. If you did any")
        info("modifications you can delete it. Otherwise you can copy it")
        info("somewhere, run again your command then merge the file at hand.")
        die()

    if gctx.interactive_mode:
        from lib.ui.console import Console
        i = Console(gctx)

    elif gctx.filename is not None:
        if not gctx.load_file():
            die()

        if gctx.list_sections:
            for s in gctx.dis.binary.iter_sections():
                s.print_header()
Example #18
import argparse

me = singleton.SingleInstance() # will sys.exit(-1) if another instance is running

parser = argparse.ArgumentParser(description='Outputs everything that comes via the serial line '
             + 'and writes it into a log file in the /tmp directory.')

parser.add_argument('-v', '--verbose', action="store_true", help='Print out more info.')

utils.args = parser.parse_args()

serial_dev = '/dev/ttyACM0'

ser = serial.Serial(serial_dev, 9600)

utils.info(serial_dev + " initialized. Getting data...")
time.sleep(3)

filename = "env_" + str(time.time()) + ".log"
myFile = open('/tmp/' + filename, 'a')
print "Writing log into /tmp/" + filename

while True:
    out = ser.readline().decode('utf-8')
    m = re.match( r"[\w\%\.]+", out)
    if not (hasattr(m, "group")):
        continue 
    out = m.group(0)
    if (len(out) >= 2):
        val = out[:-1]
        if out[-1] == "C":
Example #19
def load_file(ctx):
    if not os.path.exists(ctx.filename):
        error("file {ctx.filename} doesn't exist".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    if not os.path.isfile(ctx.filename):
        error("this is not a file".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    dirname = os.path.dirname(ctx.filename)
    db_path = dirname + "/" if dirname != "" else ""
    db_path += "." + os.path.basename(ctx.filename) + ".db"
    db_exists = os.path.exists(db_path)
    ctx.db_path = db_path

    jmptables = {}
    inline_comments = {}
    previous_comments = {}
    sym = {}
    rev_sym = {}
    mips_gp = -1

    # Open the database
    if db_exists:
        info("open database %s" % db_path)
        fd = open(db_path, "r")
        db = json.loads(fd.read())
        ctx.db = db

        # Saved symbols
        sym = db["symbols"]
        for name, addr in db["symbols"].items():
            rev_sym[addr] = name

        try:
            # Saved comments
            for ad, comm in db["inline_comments"].items():
                inline_comments[int(ad)] = comm
            for ad, comm in db["previous_comments"].items():
                previous_comments[int(ad)] = comm

            # Saved jmptables
            for j in db["jmptables"]:
                jmptables[j["inst_addr"]] = \
                    Jmptable(j["inst_addr"], j["table_addr"], j["table"], j["name"])
        except:
            # Not available in previous versions, this try will be
            # removed in the future
            pass

        try:
            mips_gp = db["mips_gp"]
        except:
            # Not available in previous versions, this try will be
            # removed in the future
            pass

        fd.close()

    try:
        dis = Disassembler(ctx.filename,
                           ctx.raw_type,
                           ctx.raw_base,
                           ctx.raw_big_endian,
                           sym,
                           rev_sym,
                           jmptables,
                           inline_comments,
                           previous_comments,
                           load_symbols=not db_exists,
                           mips_gp=mips_gp)
    except ExcArch as e:
        error("arch %s is not supported" % e.arch)
        if ctx.interactive:
            return False
        die()
    except ExcFileFormat:
        error("the file is not PE or ELF binary")
        if ctx.interactive:
            return False
        die()
    except ExcPEFail as e:
        error(str(e.e))
        error(
            "it seems that there is a random bug in pefile, you shoul retry.")
        error("please report here https://github.com/joelpx/reverse/issues/16")
        if ctx.interactive:
            return False
        die()

    ctx.dis = dis
    ctx.libarch = dis.load_arch_module()

    return True
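
The keys read above imply the on-disk layout of the JSON database. A hypothetical example of a file this loader would accept, written here as a Python dict (all values are made up):

example_db = {
    "symbols": {"main": 0x400540, "loop_exit": 0x4005a0},
    # JSON object keys are strings, which is why the loader calls int(ad).
    "inline_comments": {"4195652": "check argc"},
    "previous_comments": {"4195648": "entry point"},
    "jmptables": [
        {"inst_addr": 0x400600, "table_addr": 0x600e20,
         "table": [0x400610, 0x400640, 0x400680], "name": "jmptable_switch"},
    ],
    "mips_gp": -1,
}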
Example #20
    def __init__(self, ctx):
        self.ctx = ctx
        ctx.vim = False

        self.COMMANDS_ALPHA = [
            "calls",
            "da",
            "db",
            "dd",
            "dw",
            "dq",
            "dump",
            "exit",
            "functions",
            "help",
            "history",
            "info",
            "jmptable",
            "load",
            "lrawarm",
            "lrawmips",
            "lrawmips64",
            "lrawx86",
            "lrawx64",
            "mips_set_gp",
            "py",
            "save",
            "sections",
            "sym",
            "x",
            "v",
            "display.print_section",
            "display.print_comments",
        ]

        self.COMMANDS = {
            "help": Command(
                0,
                self.__exec_help,
                None,
                [
                "",
                "Display this help"
                ]
            ),

            "history": Command(
                0,
                self.__exec_history,
                None,
                [
                "",
                "Display the command history",
                ]
            ),

            "save": Command(
                0,
                self.__exec_save,
                None,
                [
                "",
                "Save the database (only symbols and history currently).",
                ]
            ),

            "load": Command(
                1,
                self.__exec_load,
                self.__complete_load,
                [
                "filename",
                "Load a new binary file.",
                ]
            ),

            "lrawx86": Command(
                1,
                self.__exec_lrawx86,
                self.__complete_load,
                [
                "filename",
                "Load a x86 raw file.",
                ]
            ),

            "lrawx64": Command(
                1,
                self.__exec_lrawx64,
                self.__complete_load,
                [
                "filename",
                "Load a x64 raw file.",
                ]
            ),

            "lrawarm": Command(
                1,
                self.__exec_lrawarm,
                self.__complete_load,
                [
                "filename",
                "Load a ARM raw file.",
                ]
            ),

            "lrawmips": Command(
                1,
                self.__exec_lrawmips,
                self.__complete_load,
                [
                "filename",
                "Load a MIPS raw file.",
                ]
            ),

            "lrawmips64": Command(
                1,
                self.__exec_lrawmips64,
                self.__complete_load,
                [
                "filename",
                "Load a MIPS64 raw file.",
                ]
            ),

            "x": Command(
                1,
                self.__exec_x,
                self.__complete_x,
                [
                "[SYMBOL|0xXXXX|EP]",
                "Decompile and print on stdout. By default it will be main.",
                "The decompilation is forced, it dosn't check if addresses",
                "are defined as code."
                ]
            ),

            "v": Command(
                1,
                self.__exec_v,
                self.__complete_x,
                [
                "[SYMBOL|0xXXXX|EP]",
                "Visual mode",
                "Shortcuts:",
                "c       create code",
                "p       create function",
                "g       top",
                "G       bottom",
                "z       set current line on the middle",
                "q       quit",
                ";       edit inline comment (enter/escape to validate/cancel)",
                "%       goto next bracket",
                "*       highlight current word (ctrl-k to clear)",
                "{ }     previous/next paragraph",
                "tab     switch between dump/decompilation",
                "enter   follow address",
                "escape  go back",
                "u       re-enter (for undo)",
                ]
            ),

            "da": Command(
                2,
                self.__exec_data,
                self.__complete_x,
                [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in ascii, it stops when the end of the section is found",
                ]
            ),

            "db": Command(
                2,
                self.__exec_data,
                self.__complete_x,
                [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in bytes, it stops when the end of the section is found",
                ]
            ),

            "dd": Command(
                2,
                self.__exec_data,
                self.__complete_x,
                [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in dwords, it stops when the end of the section is found",
                ]
            ),

            "dw": Command(
                2,
                self.__exec_data,
                self.__complete_x,
                [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in words, it stops when the end of the section is found",
                ]
            ),

            "dq": Command(
                2,
                self.__exec_data,
                self.__complete_x,
                [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Print data in qwords, it stops when the end of the section is found",
                ]
            ),

            # by default it will be ctx.lines
            "dump": Command(
                2,
                self.__exec_dump,
                self.__complete_x,
                [
                "SYMBOL|0xXXXX|EP [NB_LINES]",
                "Disassemble only.",
                ]
            ),

            "set": Command(
                3,
                None,
                None,
                [
                "",
                "Set options"
                ]
            ),

            "sym": Command(
                3,
                self.__exec_sym,
                self.__complete_x,
                [
                "[SYMBOL 0xXXXX] [| FILTER]",
                "Print all symbols or set a new symbol.",
                "You can filter symbols by searching the word FILTER.",
                "If FILTER starts with -, the match is inversed."
                ]
            ),

            "calls": Command(
                1,
                self.__exec_calls,
                self.__complete_x,
                [
                "[SECTION_NAME]",
                "Print all calls which are in the given section"
                ]
            ),

            "exit": Command(
                0,
                self.__exec_exit,
                None,
                [
                "",
                "Exit"
                ]
            ),

            "sections": Command(
                0,
                self.__exec_sections,
                None,
                [
                "",
                "Print all sections",
                ]
            ),

            "info": Command(
                0,
                self.__exec_info,
                None,
                [
                "",
                "Information about the current binary"
                ]
            ),

            "display.print_section": Command(
                0,
                self.__exec_display_print_section,
                None,
                [
                "",
                "Print or not section when an address is found"
                ]
            ),

            "display.print_comments": Command(
                0,
                self.__exec_display_print_comments,
                None,
                [
                "",
                "Print or not comments"
                ]
            ),

            "jmptable": Command(
                4,
                self.__exec_jmptable,
                None,
                [
                "INST_ADDR TABLE_ADDR NB_ENTRIES SIZE_ENTRY",
                "Create a jump table referenced at TABLE_ADDR and called",
                "from INST_ADDR."
                ]
            ),

            "py": Command(
                0,
                self.__exec_py,
                None,
                [
                "",
                "Run an interactive python shell."
                ]
            ),

            "mips_set_gp": Command(
                1,
                self.__exec_mips_set_gp,
                None,
                [
                "ADDR",
                "Set the register $gp to a fixed value."
                ]
            ),

            "functions": Command(
                1,
                self.__exec_functions,
                None,
                [
                "",
                "Print the function list."
                ]
            ),
        }

        self.analyzer = Analyzer()
        self.analyzer.start()
        info("analyzer is running in background...")

        rl = ReadLine(self.exec_command, self.complete, self.send_control_c)
        self.rl = rl

        if ctx.filename is not None:
            self.__exec_load(["", ctx.filename])

        if ctx.entry is not None:
            self.__exec_x(["", ctx.entry])

        rl.reload_cursor_line()

        while 1:
            rl.loop()
            if not self.check_db_modified():
                break

        self.analyzer.msg.put("exit")
Example #21
me = singleton.SingleInstance() # will sys.exit(-1) if another instance is running

parser = argparse.ArgumentParser(description='Requests data from Arduino via serial interface and sends the data to remote server.')

parser.add_argument('-s', '--send',    action="store_true", help='Send to server, otherwise just test reading the sensors.')
parser.add_argument('-v', '--verbose', action="store_true", help='Print out more info.')

utils.args = parser.parse_args()

#serial_dev = '/dev/ttyACM0'
#serial_dev = '/dev/tty.usbserial-A9007KLg'
serial_dev = '/dev/ttyAMA0'

ser = serial.Serial(serial_dev, 9600)

utils.info(serial_dev + " initialized. Getting data...")
time.sleep(3)

ser.write("2")
out = ser.readline()
light = 100.0 / 1023.0 * float(re.split(':|\\n', out)[2])
utils.info(light)
utils.send_to_cloud("light", light)

sleep = 12
ser.write("0")
time.sleep(sleep)
ser.write("1")
out = ser.readline()
count = re.split(':|\\n', out)[2]
freq = float(count) / float(sleep) / 2.0
Example #22
def load_file(ctx):
    if not os.path.exists(ctx.filename):
        error("file {ctx.filename} doesn't exist".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    if not os.path.isfile(ctx.filename):
        error("this is not a file".format(ctx=ctx))
        if ctx.interactive:
            return False
        die()

    dirname = os.path.dirname(ctx.filename)
    db_path = dirname + "/" if dirname != "" else ""
    db_path +=  "." + os.path.basename(ctx.filename) + ".db"
    db_exists = os.path.exists(db_path)
    ctx.db_path = db_path

    jmptables = {}
    inline_comments = {}
    previous_comments = {}
    sym = {}
    rev_sym = {}
    mips_gp = -1

    # Open the database
    if db_exists:
        info("open database %s" % db_path)
        fd = open(db_path, "r")
        db = json.loads(fd.read())
        ctx.db = db

        # Saved symbols
        sym = db["symbols"]
        for name, addr in db["symbols"].items():
            rev_sym[addr] = name

        try:
            # Saved comments
            for ad, comm in db["inline_comments"].items():
                inline_comments[int(ad)] = comm
            for ad, comm in db["previous_comments"].items():
                previous_comments[int(ad)] = comm

            # Saved jmptables
            for j in db["jmptables"]:
                jmptables[j["inst_addr"]] = \
                    Jmptable(j["inst_addr"], j["table_addr"], j["table"], j["name"])
        except:
            # Not available in previous versions, this try will be
            # removed in the future
            pass

        try:
            mips_gp = db["mips_gp"]
        except:
            # Not available in previous versions, this try will be
            # removed in the future
            pass

        fd.close()

    try:
        dis = Disassembler(ctx.filename, ctx.raw_type,
                           ctx.raw_base, ctx.raw_big_endian,
                           sym, rev_sym,
                           jmptables, inline_comments,
                           previous_comments,
                           load_symbols=not db_exists,
                           mips_gp=mips_gp)
    except ExcArch as e:
        error("arch %s is not supported" % e.arch)
        if ctx.interactive:
            return False
        die()
    except ExcFileFormat:
        error("the file is not PE or ELF binary")
        if ctx.interactive:
            return False
        die()
    except ExcPEFail as e:
        error(str(e.e))
        error("it seems that there is a random bug in pefile, you shoul retry.")
        error("please report here https://github.com/joelpx/reverse/issues/16")
        if ctx.interactive:
            return False
        die()

    ctx.dis = dis
    ctx.libarch = dis.load_arch_module()

    return True
Example #23
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.
#

from lib import reverse, parse_args
from lib.utils import info, die

# Generates the file custom_colors.py at the beginning
import lib.colors

if __name__ == '__main__':
    ctx = parse_args()

    if ctx.color and lib.colors.VERSION < 1.3:
        info("There is a new version of custom_colors.py. If it's wasn't")
        info("modified you can delete it. Otherwise you can copy it")
        info("somewhere, run again your command then merge the file at hand.")
        die()

    if ctx.interactive_mode:
        from lib.ui.console import Console
        i = Console(ctx)
    elif ctx.filename is not None:
        reverse(ctx)
def crop(config):
    if config['output_windowed_imgs_path'] is not None:
        makedirs(config['output_windowed_imgs_path'], exist_ok=True)

    if type(config['set_n_windows']) is str:
        set_n_windows_anno = pd.read_csv(config['set_n_windows'], index_col=0)

        n_classes = 28

        xs = []
        for target_str in set_n_windows_anno['Target']:
            targets = str_to_labels(target_str)
            x = np.zeros(n_classes, dtype='int')
            x[targets] = 1
            xs.append(x)
        xx = np.array(xs)
        n_samples_per_class = np.sum(xx, axis=0)
        cut_summary = pd.DataFrame(
            {
                'organelle': class_labels,
                'n_samples': n_samples_per_class,
                'n_windows':
                np.round(1500 / n_samples_per_class).astype(int) + 1
            },
            index=range(n_classes),
        )
        print(cut_summary)
        estimated_n_windows = np.sum(cut_summary['n_samples'].values *
                                     cut_summary['n_windows'].values)
        print(f'estimated_n_windows = {estimated_n_windows}')

    def determine_n_windows_fn(id_):
        if type(config['set_n_windows']) is str:
            targets = str_to_labels(set_n_windows_anno.loc[id_, 'Target'])
            n_windows = np.max(cut_summary.iloc[targets]['n_windows'].values)
            return n_windows
        else:
            return config['set_n_windows']

    anno = config['anno'].copy()
    anno['n_windows'] = [determine_n_windows_fn(id_) for id_ in anno.index]

    crop_task_list = [{
        'id_': id_,
        'row': row,
        'config': config,
    } for id_, row in anno.iterrows()]

    with Pool(config['n_threads']) as p:
        result_iter = p.imap_unordered(crop_one_id, crop_task_list)

        result_list = []
        for i_result, result in enumerate(result_iter):
            info(
                f"({i_result}/{len(crop_task_list)}) {result['id_']}  ->  ({len(result['df'])})"
            )
            result_list.append(result)

    if config['output_windowed_imgs_path'] is not None:
        windowed_anno = pd.concat([x['df'] for x in result_list])
        print(windowed_anno)
        if (windowed_anno['left'] == 'ERROR').any():
            warn('There were errors!')
        windowed_anno.to_csv(config['output_windowed_anno_csv_path'])

    def save_collage(field):
        display_imgs([x[field] for x in result_list],
                     save_as=pjoin(config['collage_output_path'],
                                   f"{config['run_tag']}-0-{field}.jpg"))

    if config['collage_output_path'] is not None:
        save_collage('blue_channel')
        save_collage('thresholded_img')
        save_collage('labeled_img')
        save_collage('minimap')
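
determine_n_windows_fn relies on str_to_labels to turn a 'Target' string into class indices before one-hot encoding. A minimal stand-in for that helper, assuming the space-separated label format implied by how the result is used (the project's real implementation may differ):

import numpy as np

def str_to_labels_sketch(target_str):
    # Assumed format: "0 5 25" -> [0, 5, 25], i.e. integer indices into a
    # length-28 one-hot vector, mirroring how the result is used above.
    return [int(t) for t in str(target_str).split()]

x = np.zeros(28, dtype='int')
x[str_to_labels_sketch("0 5 25")] = 1   # marks classes 0, 5 and 25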