Exemple #1
0
def main() -> None:
    """Build CFGs for EVM bytecode.

    Parses CLI arguments, optionally profiles the run with cProfile, then
    either compiles the target with CryticCompile (when the platform is
    supported) and analyzes each contract's init/runtime bytecode, or reads
    the file as raw bytecode and analyzes it directly.
    """
    # Configure the tool's logger (avoid the single-letter name 'l').
    cfg_logger = logging.getLogger("evm-cfg-builder")
    cfg_logger.setLevel(logging.INFO)
    args = parse_args()

    cp: Optional[cProfile.Profile] = None
    if args.perf:
        cp = cProfile.Profile()
        cp.enable()

    if is_supported(args.filename):
        filename = args.filename
        # 'filename' is passed positionally to CryticCompile; remove it from
        # the namespace so **vars(args) does not duplicate it.
        del args.filename
        try:
            crytic_compile = CryticCompile(filename, **vars(args))
            for key, compilation_unit in crytic_compile.compilation_units.items():
                for contract in compilation_unit.contracts_names:
                    bytecode_init = compilation_unit.bytecode_init(contract)
                    if not bytecode_init:
                        continue
                    # Register the contract's function selectors so entry
                    # points can be labeled during CFG recovery.
                    for signature, hash_id in compilation_unit.hashes(contract).items():
                        known_hashes[hash_id] = signature
                    # Lazy %-style args: formatting is skipped if INFO is off.
                    logger.info("Analyze %s", contract)
                    _run(bytecode_init,
                         f"{key}-(unknown)-{contract}-init", args)
                    runtime_bytecode = compilation_unit.bytecode_runtime(contract)
                    if runtime_bytecode:
                        _run(runtime_bytecode,
                             f"{key}-(unknown)-{contract}-runtime", args)
                    else:
                        logger.info("Runtime bytecode not available")
        except InvalidCompilation as e:
            logger.error(e)

    else:
        # Unsupported platform: treat the file as raw EVM bytecode.
        with open(args.filename, "rb") as f:
            bytecode = f.read()
        logger.info("Analyze %s", args.filename)
        _run(bytecode, args.filename, args)

    # cp is non-None exactly when profiling was requested.
    if cp is not None:
        cp.disable()
        stats = pstats.Stats(cp).sort_stats("cumtime")
        stats.print_stats()
Exemple #2
0
def main() -> None:
    """Route a Manticore run to the matching engine (EVM, WASM, or native)."""
    args = parse_arguments()

    if args.no_colors:
        log.disable_colors()

    sys.setrecursionlimit(consts.recursionlimit)

    set_verbosity(args.v)

    # Pick the engine from the target file's extension / platform support.
    target = args.argv[0]
    if target.endswith(".sol") or is_supported(target):
        ethereum_main(args, logger)
    elif target.endswith((".wasm", ".wat")):
        wasm_main(args, logger)
    else:
        install_helper.ensure_native_deps()
        native_main(args, logger)
Exemple #3
0
def main():
    """Build CFGs for EVM bytecode from a compilation target or a raw file.

    Parses CLI arguments, optionally profiles with cProfile, and dispatches
    to _run() per contract (supported platforms) or for raw bytecode.
    """
    # Configure the tool's logger (avoid the single-letter name 'l').
    cfg_logger = logging.getLogger('evm-cfg-builder')
    cfg_logger.setLevel(logging.INFO)
    args = parse_args()

    # Initialize unconditionally so 'cp' is never unbound at the check below.
    cp = None
    if args.perf:
        cp = cProfile.Profile()
        cp.enable()

    if is_supported(args.filename):
        filename = args.filename
        # 'filename' is passed positionally to CryticCompile; remove it from
        # the namespace so **vars(args) does not duplicate it.
        del args.filename
        try:
            crytic_compile = CryticCompile(filename, **vars(args))
            for contract in crytic_compile.contracts_names:
                bytecode_init = crytic_compile.bytecode_init(contract)
                if bytecode_init:
                    # 'hash_id' instead of 'hash': don't shadow the builtin.
                    for signature, hash_id in crytic_compile.hashes(contract).items():
                        known_hashes[hash_id] = signature
                    logger.info(f'Analyze {contract}')
                    _run(bytecode_init, f'(unknown)-{contract}-init', args)
                    runtime_bytecode = crytic_compile.bytecode_runtime(contract)
                    if runtime_bytecode:
                        _run(runtime_bytecode, f'(unknown)-{contract}-runtime', args)
                    else:
                        logger.info('Runtime bytecode not available')
        except InvalidCompilation as e:
            logger.error(e)

    else:
        # Unsupported platform: treat the file as raw EVM bytecode.
        with open(args.filename, 'rb') as f:
            bytecode = f.read()
        logger.info(f'Analyze {args.filename}')
        _run(bytecode, args.filename, args)

    if cp is not None:
        cp.disable()
        stats = pstats.Stats(cp).sort_stats("cumtime")
        stats.print_stats()
Exemple #4
0
def main_impl(all_detector_classes, all_printer_classes):
    """
    Run the selected detectors/printers on the command-line target and report
    results (optionally as JSON or a markdown checklist).

    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    args = parse_args(all_detector_classes, all_printer_classes)

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # If we are outputting json to stdout, we'll want to disable any logging.
    stdout_json = args.json == "-"
    if stdout_json:
        logging.disable(logging.CRITICAL)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    # Apply the chosen level to every internal logger.
    for (l_name, l_level) in [
        ('Slither', default_log),
        ('Contract', default_log),
        ('Function', default_log),
        ('Node', default_log),
        ('Parsing', default_log),
        ('Detectors', default_log),
        ('FunctionSolc', default_log),
        ('ExpressionParsing', default_log),
        ('TypeParsing', default_log),
        ('SSA_Conversion', default_log),
        ('Printers', default_log),
            #('CryticCompile', default_log)
    ]:
        logger_obj = logging.getLogger(l_name)
        logger_obj.setLevel(l_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    console_handler.setFormatter(FormatterCryticCompile())

    # CryticCompile gets its own non-propagating handler so its messages use
    # the dedicated formatter.
    crytic_compile_error = logging.getLogger('CryticCompile')
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    try:
        filename = args.filename

        globbed_filenames = glob.glob(filename, recursive=True)

        if os.path.isfile(filename) or is_supported(filename):
            (results, number_contracts) = process(filename, args,
                                                  detector_classes,
                                                  printer_classes)

        elif os.path.isdir(filename) or len(globbed_filenames) > 0:
            extension = "*.sol" if not args.solc_ast else "*.json"
            filenames = glob.glob(os.path.join(filename, extension))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0
            results = []
            if args.splitted and args.solc_ast:
                (results,
                 number_contracts) = process_files(filenames, args,
                                                   detector_classes,
                                                   printer_classes)
            else:
                for filename in filenames:
                    (results_tmp,
                     number_contracts_tmp) = process(filename, args,
                                                     detector_classes,
                                                     printer_classes)
                    number_contracts += number_contracts_tmp
                    results += results_tmp

        else:
            # Fix: the message previously had no '{filename}' placeholder, so
            # .format() was a no-op and the offending path was never shown.
            raise Exception("Unrecognised file/dir path: '{filename}'".format(
                filename=filename))

        if args.json:
            output_json(results, None if stdout_json else args.json)
        if args.checklist:
            output_results_to_markdown(results)
        # Dont print the number of result for printers
        if number_contracts == 0:
            # warning(): logger.warn is deprecated.
            logger.warning(red('No contract was analyzed'))
        if printer_classes:
            logger.info('%s analyzed (%d contracts)', filename,
                        number_contracts)
        else:
            logger.info('%s analyzed (%d contracts), %d result(s) found',
                        filename, number_contracts, len(results))
        if args.ignore_return_value:
            return
        exit(results)

    except SlitherException as se:
        # Output our error accordingly, via JSON or logging.
        if stdout_json:
            print(json.dumps(wrap_json_detectors_results(False, str(se), [])))
        else:
            logging.error(red('Error:'))
            logging.error(red(se))
            logging.error(
                'Please report an issue to https://github.com/crytic/slither/issues'
            )
        sys.exit(-1)

    except Exception:
        # Output our error accordingly, via JSON or logging.
        if stdout_json:
            print(
                json.dumps(
                    wrap_json_detectors_results(False, traceback.format_exc(),
                                                [])))
        else:
            logging.error('Error in %s' % args.filename)
            logging.error(traceback.format_exc())
        sys.exit(-1)
Exemple #5
0
def main_impl(all_detector_classes, all_printer_classes):
    """
    Run the selected detectors/printers on the command-line target and report
    results (optionally as JSON or a zip archive).

    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    # Set logger of Slither to info, to catch warnings related to the arg parsing
    logger.setLevel(logging.INFO)
    args = parse_args(all_detector_classes, all_printer_classes)

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # Define some variables for potential JSON output
    json_results = {}
    output_error = None
    outputting_json = args.json is not None
    outputting_json_stdout = args.json == '-'
    outputting_zip = args.zip is not None
    if args.zip_type not in ZIP_TYPES_ACCEPTED.keys():
        logger.error(f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED.keys())}')

    # If we are outputting JSON, capture all standard output. If we are outputting to stdout, we block typical stdout
    # output.
    if outputting_json:
        StandardOutputCapture.enable(outputting_json_stdout)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    # Apply the chosen level to every internal logger.
    for (l_name, l_level) in [('Slither', default_log),
                              ('Contract', default_log),
                              ('Function', default_log),
                              ('Node', default_log),
                              ('Parsing', default_log),
                              ('Detectors', default_log),
                              ('FunctionSolc', default_log),
                              ('ExpressionParsing', default_log),
                              ('TypeParsing', default_log),
                              ('SSA_Conversion', default_log),
                              ('Printers', default_log),
                              # ('CryticCompile', default_log)
                              ]:
        logger_obj = logging.getLogger(l_name)
        logger_obj.setLevel(l_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    console_handler.setFormatter(FormatterCryticCompile())

    # CryticCompile gets its own non-propagating handler so its messages use
    # the dedicated formatter.
    crytic_compile_error = logging.getLogger('CryticCompile')
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    results_detectors = []
    results_printers = []
    try:
        filename = args.filename

        # Determine if we are handling ast from solc
        if args.solc_ast or (filename.endswith('.json') and not is_supported(filename)):
            globbed_filenames = glob.glob(filename, recursive=True)
            filenames = glob.glob(os.path.join(filename, "*.json"))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0

            slither_instances = []
            if args.splitted:
                (slither_instance, results_detectors, results_printers, number_contracts) = process_from_asts(filenames,
                                                                                                              args,
                                                                                                              detector_classes,
                                                                                                              printer_classes)
                slither_instances.append(slither_instance)
            else:
                for filename in filenames:
                    (slither_instance, results_detectors_tmp, results_printers_tmp,
                     number_contracts_tmp) = process_single(filename, args, detector_classes, printer_classes)
                    number_contracts += number_contracts_tmp
                    results_detectors += results_detectors_tmp
                    results_printers += results_printers_tmp
                    slither_instances.append(slither_instance)

        # Rely on CryticCompile to discern the underlying type of compilations.
        else:
            (slither_instances, results_detectors, results_printers, number_contracts) = process_all(filename, args,
                                                                                                     detector_classes,
                                                                                                     printer_classes)

        # Determine if we are outputting JSON
        if outputting_json or outputting_zip:
            # Add our compilation information to JSON
            if 'compilations' in args.json_types:
                compilation_results = []
                for slither_instance in slither_instances:
                    compilation_results.append(generate_standard_export(slither_instance.crytic_compile))
                json_results['compilations'] = compilation_results

            # Add our detector results to JSON if desired.
            if results_detectors and 'detectors' in args.json_types:
                json_results['detectors'] = results_detectors

            # Add our printer results to JSON if desired.
            if results_printers and 'printers' in args.json_types:
                json_results['printers'] = results_printers

            # Add our detector types to JSON
            if 'list-detectors' in args.json_types:
                detectors, _ = get_detectors_and_printers()
                json_results['list-detectors'] = output_detectors_json(detectors)

            # Add our printer types to JSON
            if 'list-printers' in args.json_types:
                _, printers = get_detectors_and_printers()
                json_results['list-printers'] = output_printers_json(printers)

        # Output our results to markdown if we wish to compile a checklist.
        if args.checklist:
            output_results_to_markdown(results_detectors)

        # Dont print the number of result for printers
        if number_contracts == 0:
            logger.warning(red('No contract was analyzed'))
        if printer_classes:
            logger.info('%s analyzed (%d contracts)', filename, number_contracts)
        else:
            logger.info('%s analyzed (%d contracts with %d detectors), %d result(s) found', filename,
                        number_contracts, len(detector_classes), len(results_detectors))

        logger.info(blue('Use https://crytic.io/ to get access to additional detectors and Github integration'))
        if args.ignore_return_value:
            return

    except SlitherException as se:
        output_error = str(se)
        traceback.print_exc()
        logging.error(red('Error:'))
        logging.error(red(output_error))
        logging.error('Please report an issue to https://github.com/crytic/slither/issues')

    except Exception:
        # Fix: previously this logged logging.error(traceback.print_exc()),
        # i.e. the literal 'None' (print_exc returns None) plus a duplicate
        # traceback on stderr; the traceback is already in output_error.
        output_error = traceback.format_exc()
        logging.error('Error in %s' % args.filename)
        logging.error(output_error)


    # If we are outputting JSON, capture the redirected output and disable the redirect to output the final JSON.
    if outputting_json:
        if 'console' in args.json_types:
            json_results['console'] = {
                'stdout': StandardOutputCapture.get_stdout_output(),
                'stderr': StandardOutputCapture.get_stderr_output()
            }
        StandardOutputCapture.disable()
        output_to_json(None if outputting_json_stdout else args.json, output_error, json_results)

    if outputting_zip:
        output_to_zip(args.zip, output_error, json_results, args.zip_type)

    # Exit with the appropriate status code
    if output_error:
        sys.exit(-1)
    else:
        exit(results_detectors)
Exemple #6
0
def main_impl(all_detector_classes, all_printer_classes):
    """
    Run the selected detectors/printers on the command-line target and report
    results (optionally as JSON, SARIF, or a zip archive).

    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    # Set logger of Slither to info, to catch warnings related to the arg parsing
    logger.setLevel(logging.INFO)
    args = parse_args(all_detector_classes, all_printer_classes)

    cp: Optional[cProfile.Profile] = None
    if args.perf:
        cp = cProfile.Profile()
        cp.enable()

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # Define some variables for potential JSON output
    json_results = {}
    output_error = None
    outputting_json = args.json is not None
    outputting_json_stdout = args.json == "-"
    outputting_sarif = args.sarif is not None
    outputting_sarif_stdout = args.sarif == "-"
    outputting_zip = args.zip is not None
    if args.zip_type not in ZIP_TYPES_ACCEPTED.keys():
        to_log = f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED.keys())}'
        logger.error(to_log)

    # If we are outputting JSON, capture all standard output. If we are outputting to stdout, we block typical stdout
    # output.
    # Fix: previously tested 'output_to_sarif' (the function object, always
    # truthy) instead of the 'outputting_sarif' flag, so capture was always on.
    if outputting_json or outputting_sarif:
        StandardOutputCapture.enable(outputting_json_stdout
                                     or outputting_sarif_stdout)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    # Apply the chosen level to every internal logger.
    for (l_name, l_level) in [
        ("Slither", default_log),
        ("Contract", default_log),
        ("Function", default_log),
        ("Node", default_log),
        ("Parsing", default_log),
        ("Detectors", default_log),
        ("FunctionSolc", default_log),
        ("ExpressionParsing", default_log),
        ("TypeParsing", default_log),
        ("SSA_Conversion", default_log),
        ("Printers", default_log),
            # ('CryticCompile', default_log)
    ]:
        logger_level = logging.getLogger(l_name)
        logger_level.setLevel(l_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    console_handler.setFormatter(FormatterCryticCompile())

    # CryticCompile gets its own non-propagating handler so its messages use
    # the dedicated formatter.
    crytic_compile_error = logging.getLogger("CryticCompile")
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    results_detectors = []
    results_printers = []
    try:
        filename = args.filename

        # Determine if we are handling ast from solc
        if args.solc_ast or (filename.endswith(".json")
                             and not is_supported(filename)):
            globbed_filenames = glob.glob(filename, recursive=True)
            filenames = glob.glob(os.path.join(filename, "*.json"))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0

            slither_instances = []
            if args.splitted:
                (
                    slither_instance,
                    results_detectors,
                    results_printers,
                    number_contracts,
                ) = process_from_asts(filenames, args, detector_classes,
                                      printer_classes)
                slither_instances.append(slither_instance)
            else:
                for filename in filenames:
                    (
                        slither_instance,
                        results_detectors_tmp,
                        results_printers_tmp,
                        number_contracts_tmp,
                    ) = process_single(filename, args, detector_classes,
                                       printer_classes)
                    number_contracts += number_contracts_tmp
                    results_detectors += results_detectors_tmp
                    results_printers += results_printers_tmp
                    slither_instances.append(slither_instance)

        # Rely on CryticCompile to discern the underlying type of compilations.
        else:
            (
                slither_instances,
                results_detectors,
                results_printers,
                number_contracts,
            ) = process_all(filename, args, detector_classes, printer_classes)

        # Determine if we are outputting JSON
        # Fix: same flag-vs-function confusion as above ('output_to_sarif').
        if outputting_json or outputting_zip or outputting_sarif:
            # Add our compilation information to JSON
            if "compilations" in args.json_types:
                compilation_results = []
                for slither_instance in slither_instances:
                    compilation_results.append(
                        generate_standard_export(
                            slither_instance.crytic_compile))
                json_results["compilations"] = compilation_results

            # Add our detector results to JSON if desired.
            if results_detectors and "detectors" in args.json_types:
                json_results["detectors"] = results_detectors

            # Add our printer results to JSON if desired.
            if results_printers and "printers" in args.json_types:
                json_results["printers"] = results_printers

            # Add our detector types to JSON
            if "list-detectors" in args.json_types:
                detectors, _ = get_detectors_and_printers()
                json_results["list-detectors"] = output_detectors_json(
                    detectors)

            # Add our printer types to JSON
            if "list-printers" in args.json_types:
                _, printers = get_detectors_and_printers()
                json_results["list-printers"] = output_printers_json(printers)

        # Output our results to markdown if we wish to compile a checklist.
        if args.checklist:
            output_results_to_markdown(results_detectors, args.checklist_limit)

        # Dont print the number of result for printers
        if number_contracts == 0:
            logger.warning(red("No contract was analyzed"))
        if printer_classes:
            logger.info("%s analyzed (%d contracts)", filename,
                        number_contracts)
        else:
            logger.info(
                "%s analyzed (%d contracts with %d detectors), %d result(s) found",
                filename,
                number_contracts,
                len(detector_classes),
                len(results_detectors),
            )
        if args.ignore_return_value:
            return

    except SlitherException as slither_exception:
        output_error = str(slither_exception)
        traceback.print_exc()
        logging.error(red("Error:"))
        logging.error(red(output_error))
        logging.error(
            "Please report an issue to https://github.com/crytic/slither/issues"
        )

    except Exception:  # pylint: disable=broad-except
        # Fix: previously logged logging.error(traceback.print_exc()), i.e.
        # the literal 'None'; the traceback is already in output_error.
        output_error = traceback.format_exc()
        logging.error(f"Error in {args.filename}")  # pylint: disable=logging-fstring-interpolation
        logging.error(output_error)

    # If we are outputting JSON, capture the redirected output and disable the redirect to output the final JSON.
    if outputting_json:
        if "console" in args.json_types:
            json_results["console"] = {
                "stdout": StandardOutputCapture.get_stdout_output(),
                "stderr": StandardOutputCapture.get_stderr_output(),
            }
        StandardOutputCapture.disable()
        output_to_json(None if outputting_json_stdout else args.json,
                       output_error, json_results)

    if outputting_sarif:
        StandardOutputCapture.disable()
        output_to_sarif(None if outputting_sarif_stdout else args.sarif,
                        json_results, detector_classes)

    if outputting_zip:
        output_to_zip(args.zip, output_error, json_results, args.zip_type)

    # cp is non-None exactly when --perf was requested; the explicit guard
    # also satisfies static checkers for the Optional type.
    if args.perf and cp is not None:
        cp.disable()
        stats = pstats.Stats(cp).sort_stats("cumtime")
        stats.print_stats()

    # Exit with the appropriate status code
    if output_error:
        sys.exit(-1)
    else:
        my_exit(results_detectors)