Example No. 1
def funcs(request):
    """
    Pytest fixture: builds a FunctionExtractor from the bytecode file
    named in the fixture parameters.

    Returns: the FunctionExtractor, followed by the remaining three
    expected-value parameters of the fixture.
    """
    settings.import_config()
    # Decompile the bytecode into a TAC control-flow graph and analyse it.
    with open(dir_path + request.param[0], 'r') as f:
        cfg = tac_cfg.TACGraph.from_bytecode(f.read())
    dataflow.analyse_graph(cfg)
    # Extract function boundaries from the analysed graph.
    fun_extractor = function.FunctionExtractor(cfg)
    fun_extractor.extract()
    return fun_extractor, request.param[1], request.param[2], request.param[3]
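
Since funcs takes pytest's request object, it is intended to be registered as a parametrized fixture. A minimal sketch of how a test module might wire it up; the decorator, parameter tuples, and test body below are illustrative assumptions, not taken from the source:

import pytest

# Hypothetical parameter tuples: (bytecode file, expected function count,
# and two further expected values). Only the first element is read inside
# the fixture; the rest pass through to the test unchanged.
@pytest.fixture(params=[
    ("basic.hex", 2, 1, 1),
])
def funcs(request):
    ...  # body as in Example No. 1

def test_function_count(funcs):
    extractor, n_funcs, _, _ = funcs
    # `functions` is assumed to be the extractor's list of discovered functions.
    assert len(extractor.functions) == n_funcs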
Example No. 2
def vandal_cfg(**kwargs):
    # Load the default Vandal analysis configuration.
    settings.import_config(settings._CONFIG_LOC_)

    # Decompile the raw bytecode into a TAC control-flow graph and analyse it.
    cfg = tac_cfg.TACGraph.from_bytecode(kwargs['input'])
    dataflow.analyse_graph(cfg)

    # Hand the string-exported CFG back through the caller-supplied result dict.
    kwargs['res']['res'] = exporter.CFGStringExporter(cfg).export()
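
The calling convention of vandal_cfg (keyword arguments carrying both the input bytecode and a mutable 'res' dict) suggests it is meant to run inside a wrapper such as a thread, so the exported CFG can be read back after the worker finishes. A minimal sketch under that assumption; the file name and timeout are illustrative:

import threading

result = {'res': None}
with open("contract.hex") as f:
    bytecode = f.read()

worker = threading.Thread(target=vandal_cfg,
                          kwargs={'input': bytecode, 'res': result})
worker.start()
worker.join(timeout=60)  # bound the analysis to 60 seconds

if result['res'] is not None:
    print(result['res'])  # the string-exported CFG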
Example No. 3
def analyse_contract(job_index: int, index: int, filename: str, result_queue) -> None:
    """
    Perform dataflow analysis on a contract, storing the result in the queue.
    This is a worker function to be passed to a subprocess.

    Args:
        job_index: the job number for this invocation of analyse_contract
        index: the number of the particular contract being analysed
        filename: the location of the contract bytecode file to process
        result_queue: a multiprocessing queue in which to store the analysis results
    """

    try:
        with open(join(args.contract_dir, filename)) as file:
            # Decompile and perform dataflow analysis upon the given graph
            decomp_start = time.time()
            cfg = tac_cfg.TACGraph.from_bytecode(file)
            analytics = dataflow.analyse_graph(cfg)

            # Export relations to temp working directory
            backup_and_empty_working_dir(job_index)
            work_dir = working_dir(job_index)
            out_dir = working_dir(job_index, True)
            exporter.CFGTsvExporter(cfg).export(output_dir=work_dir,
                                                dominators=DOMINATORS,
                                                out_opcodes=OPCODES)
            # Record the absolute path of the input contract alongside the facts.
            contract_filename = os.path.join(os.getcwd(), args.contract_dir, filename)
            with open(os.path.join(work_dir, 'contract_filename.txt'), 'w') as f:
                f.write(contract_filename)
            os.symlink(contract_filename, os.path.join(os.getcwd(), work_dir, 'contract.hex'))
            # Run souffle on those relations
            souffle_start = time.time()
            analysis_args = [os.path.join(os.getcwd(), DEFAULT_SOUFFLE_EXECUTABLE),
                             "--facts={}".format(work_dir),
                             "--output={}".format(out_dir)]
            subprocess.run(analysis_args)

            # Collect the results and put them in the result queue.
            # A non-empty output relation means the Datalog analysis derived
            # at least one fact for it, i.e. that vulnerability was flagged.
            vulns = []
            for fname in os.listdir(out_dir):
                fpath = join(out_dir, fname)
                if os.path.getsize(fpath) != 0:
                    vulns.append(fname.split(".")[0])

            meta = []
            if cfg.has_unresolved_jump:
                meta.append("unresolved")

            # Decompile + Analysis time
            decomp_time = souffle_start - decomp_start
            souffle_time = time.time() - souffle_start
            log("{}: {:.20}... completed in {:.2f} + {:.2f} secs".format(index, filename,
                                                                         decomp_time,
                                                                         souffle_time))

            analytics["decomp_time"] = decomp_time
            analytics["souffle_time"] = souffle_time
            get_gigahorse_analytics(out_dir, analytics)
            result_queue.put((filename, vulns, meta, analytics))

    except Exception as e:
        log("Error: {}".format(e))
        result_queue.put((filename, [], ["error"], {}))
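
analyse_contract is written as a multiprocessing worker: the job index keeps concurrent workers in separate working directories, and results travel back through a shared queue. A stripped-down driver sketch, assuming a sequential loop and a SimpleQueue; a real harness would overlap workers and recycle job slots:

import os
from multiprocessing import Process, SimpleQueue

result_queue = SimpleQueue()
contracts = sorted(os.listdir(args.contract_dir))

for index, filename in enumerate(contracts):
    job_index = index % 8  # reuse eight working directories round-robin
    worker = Process(target=analyse_contract,
                     args=(job_index, index, filename, result_queue))
    worker.start()
    worker.join()  # sequential here, purely for illustration

while not result_queue.empty():
    filename, vulns, meta, analytics = result_queue.get()
    print(filename, vulns, meta)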
Example No. 4
# Fragment: the enclosing try block, in which `cfg` is built from the input
# bytecode, is elided by the example.
except KeyboardInterrupt:
    logging.critical("\nInterrupted by user")
    sys.exit(1)

# Initialise data flow settings.
settings.import_config(args.config_file)

# Override the config file with any settings supplied on the command line,
# given as a comma-separated list of key=value pairs.
if args.config is not None:
    pairs = [
        pair.split("=") for pair in args.config.replace(" ", "").split(",")
    ]
    for k, v in pairs:
        settings.set_from_string(k, v)

# Run data flow analysis
dataflow.analyse_graph(cfg)

# Generate output using the requested exporter(s)
if not args.no_out:
    logging.info("Writing string output.")
    print(exporter.CFGStringExporter(cfg).export(), file=args.outfile)

if args.graph is not None:
    exporter.CFGDotExporter(cfg).export(args.graph)

if args.tsv is not None:
    logging.info("Writing TSV output.")
    exporter.CFGTsvExporter(cfg).export(output_dir=args.tsv,
                                        dominators=args.dominators,
                                        out_opcodes=args.opcodes)
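
The attributes read from args above imply an argparse front end. A hedged sketch of what that parser might look like; every flag spelling, help string, and default here is an assumption inferred from the attributes used (args.config_file, args.config, args.no_out, args.outfile, args.graph, args.tsv, args.dominators, args.opcodes):

import argparse
import sys

parser = argparse.ArgumentParser(
    description="Decompile EVM bytecode and export its control-flow graph.")
parser.add_argument("--config_file", default="config.ini",
                    help="path to the analysis settings file")
parser.add_argument("-c", "--config",
                    help='comma-separated "key=value" setting overrides')
parser.add_argument("--no_out", action="store_true",
                    help="suppress the string CFG output")
parser.add_argument("--outfile", type=argparse.FileType("w"), default=sys.stdout,
                    help="where to write the string CFG")
parser.add_argument("-g", "--graph", help="output file for a DOT rendering of the CFG")
parser.add_argument("-t", "--tsv", help="output directory for TSV relations")
parser.add_argument("--dominators", action="store_true")
parser.add_argument("--opcodes", nargs="*", default=[])
args = parser.parse_args()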
Example No. 5
def analyse_contract(job_index: int, index: int, filename: str, result_queue,
                     timeout: int) -> None:
    """
    Perform dataflow analysis on a contract, storing the result in the queue.
    This is a worker function to be passed to a subprocess.

    Args:
        job_index: the job number for this invocation of analyse_contract
        index: the number of the particular contract being analysed
        filename: the location of the contract bytecode file to process
        result_queue: a multiprocessing queue in which to store the analysis results
        timeout: the number of seconds to allow the souffle subprocess to run
                 before it is terminated
    """
    global souffle_proc
    try:
        with open(join(args.contract_dir, filename)) as file:
            # Decompile and perform dataflow analysis upon the given graph
            decomp_start = time.time()
            cfg = tac_cfg.TACGraph.from_bytecode(file)
            analytics = dataflow.analyse_graph(cfg)

            # Export relations to temp working directory
            empty_working_dir(job_index)
            work_dir = working_dir(job_index)
            out_dir = working_dir(job_index, True)
            exporter.CFGTsvExporter(cfg).export(output_dir=work_dir,
                                                dominators=DOMINATORS,
                                                out_opcodes=OPCODES)

            # Run souffle on those relations
            souffle_start = time.time()
            souffle_args = [
                args.souffle_bin, "--fact-dir={}".format(work_dir),
                "--output-dir={}".format(out_dir), args.spec.name
            ]
            if args.compile_souffle:
                souffle_args.append("--compile")
            souffle_proc = subprocess.Popen(souffle_args)
            souffle_proc.communicate(timeout=timeout)

            # Collect the results and put them in the result queue
            vulns = []
            for fname in os.listdir(out_dir):
                fpath = join(out_dir, fname)
                if os.path.getsize(fpath) != 0:
                    vulns.append(fname.split(".")[0])

            meta = []
            if cfg.has_unresolved_jump:
                meta.append("unresolved")

            # Decompile + Analysis time
            decomp_time = souffle_start - decomp_start
            souffle_time = time.time() - souffle_start
            log("{}: {:.20}... completed in {:.2f} + {:.2f} secs".format(
                index, filename, decomp_time, souffle_time))

            analytics["decomp_time"] = decomp_time
            analytics["souffle_time"] = souffle_time

            result_queue.put((filename, vulns, meta, analytics))

    except subprocess.TimeoutExpired as e:
        souffle_proc.terminate()
        log("{} timed out after {} secs (limit {} secs).".format(
            filename,
            time.time() - souffle_start, e.timeout))
        result_queue.put((filename, [], ["TIMEOUT"], {}))

    except Exception as e:
        log("Error ({}): {}".format(filename, e))
        result_queue.put((filename, [], ["error"], {}))
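
The Popen / communicate(timeout=...) / terminate() pattern used in the TimeoutExpired handler above is the standard way to bound a subprocess in Python. A self-contained sketch of just that pattern, with a placeholder command list:

import subprocess

def run_with_timeout(cmd, timeout):
    """Run cmd, killing it if it exceeds timeout seconds.

    Returns True on normal completion, False on timeout.
    """
    proc = subprocess.Popen(cmd)
    try:
        proc.communicate(timeout=timeout)
        return True
    except subprocess.TimeoutExpired:
        proc.terminate()  # ask the process to stop...
        proc.wait()       # ...and reap it so no zombie lingers
        return False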