Example #1
def process_truffle(dirname, args, detector_classes, printer_classes):
    if args.truffle_version:
        cmd = ['npx', args.truffle_version, 'compile']
    else:
        cmd = ['truffle', 'compile']
    logger.info('truffle compile running...')
    process = subprocess.Popen(cmd,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)

    stdout, stderr = process.communicate()
    # convert bytestrings to unicode strings
    stdout, stderr = stdout.decode(), stderr.decode()

    logger.info(stdout)

    if stderr:
        logger.error(stderr)

    if not os.path.isdir(os.path.join(dirname, 'build')) \
            or not os.path.isdir(os.path.join(dirname, 'build', 'contracts')):
        logger.info(red('No truffle build directory found, did you run `truffle compile`?'))
        return ([], 0)

    filenames = glob.glob(os.path.join(dirname, 'build', 'contracts', '*.json'))

    all_contracts = []
    all_filenames = []

    for filename in filenames:
        with open(filename) as f:
            contract_loaded = json.load(f)
            all_contracts.append(contract_loaded['ast'])
            all_filenames.append(contract_loaded['sourcePath'])

    slither = Slither(all_contracts, args.solc, args.disable_solc_warnings,
                      args.solc_args)
    return _process(slither, detector_classes, printer_classes)
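A hypothetical driver for the function above (the argparse.Namespace fields, the project path, and the empty detector/printer lists are illustrative assumptions, not part of the original code):

import argparse

# Illustrative only: mirror the attributes that process_truffle() reads from `args`.
args = argparse.Namespace(truffle_version=None, solc='solc',
                          disable_solc_warnings=False, solc_args=None)
results, number_contracts = process_truffle('my-truffle-project', args,
                                            detector_classes=[], printer_classes=[])
print('{} contract(s) analyzed, {} result(s) found'.format(number_contracts, len(results)))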
Example #2
    def get_detectors_result(self) -> Tuple[str, List[Dict], int, int, int, int, int]:
        (
            all_results,
            optimization,
            informational,
            low,
            medium,
            high,
        ) = self._get_detectors_result()
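        # Build the colorized summary: optimization/informational/low counts are always green,
        # while the medium and high counts switch to yellow/red when non-zero.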
        txt = "Number of optimization issues: {}\n".format(green(optimization))
        txt += "Number of informational issues: {}\n".format(green(informational))
        txt += "Number of low issues: {}\n".format(green(low))
        if medium > 0:
            txt += "Number of medium issues: {}\n".format(yellow(medium))
        else:
            txt += "Number of medium issues: {}\n".format(green(medium))
        if high > 0:
            txt += "Number of high issues: {}\n".format(red(high))
        else:
            txt += "Number of high issues: {}\n\n".format(green(high))

        return txt, all_results, optimization, informational, low, medium, high
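A minimal usage sketch (report stands in for an instance of the class that defines get_detectors_result above; the exit-code convention is an assumption):

# Hypothetical caller: print the colorized summary and fail when high-severity issues exist.
summary, all_results, optimization, informational, low, medium, high = report.get_detectors_result()
print(summary)
if high > 0:
    raise SystemExit(1)  # assumption: a non-zero exit code flags failing findings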
Example #3
def process_truffle(dirname, args, detector_classes, printer_classes):
    if not os.path.isdir(os.path.join(dirname, 'build')) \
            or not os.path.isdir(os.path.join(dirname, 'build', 'contracts')):
        logger.info(red('No truffle build directory found, did you run `truffle compile`?'))
        return (0, 0)

    filenames = glob.glob(os.path.join(dirname, 'build', 'contracts', '*.json'))

    all_contracts = []

    for filename in filenames:
        with open(filename) as f:
            contract_loaded = json.load(f)
            all_contracts += contract_loaded['ast']['nodes']

    contract = {"nodeType": "SourceUnit", "nodes": all_contracts}

    slither = Slither(contract, args.solc, args.disable_solc_warnings,
                      args.solc_args)
    return _process(slither, detector_classes, printer_classes)
Example #4
def check_variable_initialization(contract):
    results = {'variables-initialized': []}

    logger.info(
        green(
            '\n## Run variable initialization checks... (see https://github.com/crytic/slither/wiki/Upgradeability-Checks)'
        ))

    error_found = False

    for s in contract.state_variables:
        if s.initialized and not s.is_constant:
            info = f'{s.canonical_name} has an initial value ({s.source_mapping_str})'
            logger.info(red(info))
            res = output.Output(info)
            res.add(s)
            results['variables-initialized'].append(res.data)
            error_found = True

    if not error_found:
        logger.info(green('No error found'))

    return results
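A hedged usage sketch for the check above (assumes the standard Slither entry point and a locally compiled target; the file name is a placeholder):

from slither.slither import Slither  # assumption: standard slither-analyzer package layout

slither = Slither('MyUpgradeableContract.sol')
for contract in slither.contracts:
    findings = check_variable_initialization(contract)
    if findings['variables-initialized']:
        print('{}: state variables with initial values detected'.format(contract.name))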
Example #5
    def _check(self):
        results = []

        # TODO: handle MultipleInitTarget
        try:
            most_derived_init = _get_most_derived_init(self.contract)
        except MultipleInitTarget:
            logger.error(red(f"Too many init targets in {self.contract}"))
            return []

        if most_derived_init is None:
            return []

        all_init_functions = _get_initialize_functions(self.contract)
        all_init_functions_called = _get_all_internal_calls(
            most_derived_init) + [most_derived_init]
        missing_calls = [
            f for f in all_init_functions if f not in all_init_functions_called
        ]
        for f in missing_calls:
            info = ["Missing call to ", f, " in ", most_derived_init, ".\n"]
            res = self.generate_result(info)
            results.append(res)
        return results
Example #6
def main_impl(all_detector_classes, all_printer_classes):
    """
    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    args = parse_args(all_detector_classes, all_printer_classes)

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    for (l_name, l_level) in [('Slither', default_log),
                              ('Contract', default_log),
                              ('Function', default_log), ('Node', default_log),
                              ('Parsing', default_log),
                              ('Detectors', default_log),
                              ('FunctionSolc', default_log),
                              ('ExpressionParsing', default_log),
                              ('TypeParsing', default_log),
                              ('SSA_Conversion', default_log),
                              ('Printers', default_log)]:
        l = logging.getLogger(l_name)
        l.setLevel(l_level)

    try:
        filename = args.filename

        globbed_filenames = glob.glob(filename, recursive=True)

        if os.path.isfile(filename):
            (results, number_contracts) = process(filename, args,
                                                  detector_classes,
                                                  printer_classes)

        elif os.path.isfile(os.path.join(
                filename, 'truffle.js')) or os.path.isfile(
                    os.path.join(filename, 'truffle-config.js')):
            (results,
             number_contracts) = process_truffle(filename, args,
                                                 detector_classes,
                                                 printer_classes)

        elif os.path.isdir(filename) or len(globbed_filenames) > 0:
            extension = "*.sol" if not args.solc_ast else "*.json"
            filenames = glob.glob(os.path.join(filename, extension))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0
            results = []
            if args.splitted and args.solc_ast:
                (results,
                 number_contracts) = process_files(filenames, args,
                                                   detector_classes,
                                                   printer_classes)
            else:
                for filename in filenames:
                    (results_tmp,
                     number_contracts_tmp) = process(filename, args,
                                                     detector_classes,
                                                     printer_classes)
                    number_contracts += number_contracts_tmp
                    results += results_tmp

        else:
            raise Exception("Unrecognised file/dir path: '{filename}'".format(
                filename=filename))

        if args.json:
            output_json(results, args.json)
        if args.checklist:
            output_results_to_markdown(results)
        # Don't print the number of results for printers
        if number_contracts == 0:
            logger.warning(red('No contract was analyzed'))
        if printer_classes:
            logger.info('%s analyzed (%d contracts)', filename,
                        number_contracts)
        else:
            logger.info('%s analyzed (%d contracts), %d result(s) found',
                        filename, number_contracts, len(results))
        exit(results)

    except Exception:
        logging.error('Error in %s' % args.filename)
        logging.error(traceback.format_exc())
        sys.exit(-1)
Example #7
def main_impl(all_detector_classes, all_printer_classes):
    """
    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    # Set logger of Slither to info, to catch warnings related to the arg parsing
    logger.setLevel(logging.INFO)
    args = parse_args(all_detector_classes, all_printer_classes)

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # Define some variables for potential JSON output
    json_results = {}
    output_error = None
    outputting_json = args.json is not None
    outputting_json_stdout = args.json == '-'
    outputting_zip = args.zip is not None
    if args.zip_type not in ZIP_TYPES_ACCEPTED.keys():
        logger.error(f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED.keys())}')

    # If we are outputting JSON, capture all standard output. If we are outputting to stdout, we block typical stdout
    # output.
    if outputting_json:
        StandardOutputCapture.enable(outputting_json_stdout)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    for (l_name, l_level) in [('Slither', default_log),
                              ('Contract', default_log),
                              ('Function', default_log),
                              ('Node', default_log),
                              ('Parsing', default_log),
                              ('Detectors', default_log),
                              ('FunctionSolc', default_log),
                              ('ExpressionParsing', default_log),
                              ('TypeParsing', default_log),
                              ('SSA_Conversion', default_log),
                              ('Printers', default_log),
                              # ('CryticCompile', default_log)
                              ]:
        l = logging.getLogger(l_name)
        l.setLevel(l_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    console_handler.setFormatter(FormatterCryticCompile())

    crytic_compile_error = logging.getLogger('CryticCompile')
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    results_detectors = []
    results_printers = []
    try:
        filename = args.filename

        # Determine if we are handling ast from solc
        if args.solc_ast or (filename.endswith('.json') and not is_supported(filename)):
            globbed_filenames = glob.glob(filename, recursive=True)
            filenames = glob.glob(os.path.join(filename, "*.json"))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0

            slither_instances = []
            if args.splitted:
                (slither_instance, results_detectors, results_printers, number_contracts) = process_from_asts(filenames,
                                                                                                              args,
                                                                                                              detector_classes,
                                                                                                              printer_classes)
                slither_instances.append(slither_instance)
            else:
                for filename in filenames:
                    (slither_instance, results_detectors_tmp, results_printers_tmp,
                     number_contracts_tmp) = process_single(filename, args, detector_classes, printer_classes)
                    number_contracts += number_contracts_tmp
                    results_detectors += results_detectors_tmp
                    results_printers += results_printers_tmp
                    slither_instances.append(slither_instance)

        # Rely on CryticCompile to discern the underlying type of compilations.
        else:
            (slither_instances, results_detectors, results_printers, number_contracts) = process_all(filename, args,
                                                                                                     detector_classes,
                                                                                                     printer_classes)

        # Determine if we are outputting JSON
        if outputting_json or outputting_zip:
            # Add our compilation information to JSON
            if 'compilations' in args.json_types:
                compilation_results = []
                for slither_instance in slither_instances:
                    compilation_results.append(generate_standard_export(slither_instance.crytic_compile))
                json_results['compilations'] = compilation_results

            # Add our detector results to JSON if desired.
            if results_detectors and 'detectors' in args.json_types:
                json_results['detectors'] = results_detectors

            # Add our printer results to JSON if desired.
            if results_printers and 'printers' in args.json_types:
                json_results['printers'] = results_printers

            # Add our detector types to JSON
            if 'list-detectors' in args.json_types:
                detectors, _ = get_detectors_and_printers()
                json_results['list-detectors'] = output_detectors_json(detectors)

            # Add our detector types to JSON
            if 'list-printers' in args.json_types:
                _, printers = get_detectors_and_printers()
                json_results['list-printers'] = output_printers_json(printers)

        # Output our results to markdown if we wish to compile a checklist.
        if args.checklist:
            output_results_to_markdown(results_detectors)

        # Don't print the number of results for printers
        if number_contracts == 0:
            logger.warning(red('No contract was analyzed'))
        if printer_classes:
            logger.info('%s analyzed (%d contracts)', filename, number_contracts)
        else:
            logger.info('%s analyzed (%d contracts with %d detectors), %d result(s) found', filename,
                        number_contracts, len(detector_classes), len(results_detectors))

        logger.info(blue('Use https://crytic.io/ to get access to additional detectors and Github integration'))
        if args.ignore_return_value:
            return

    except SlitherException as se:
        output_error = str(se)
        traceback.print_exc()
        logging.error(red('Error:'))
        logging.error(red(output_error))
        logging.error('Please report an issue to https://github.com/crytic/slither/issues')

    except Exception:
        output_error = traceback.format_exc()
        logging.error(traceback.print_exc())
        logging.error('Error in %s' % args.filename)
        logging.error(output_error)


    # If we are outputting JSON, capture the redirected output and disable the redirect to output the final JSON.
    if outputting_json:
        if 'console' in args.json_types:
            json_results['console'] = {
                'stdout': StandardOutputCapture.get_stdout_output(),
                'stderr': StandardOutputCapture.get_stderr_output()
            }
        StandardOutputCapture.disable()
        output_to_json(None if outputting_json_stdout else args.json, output_error, json_results)

    if outputting_zip:
        output_to_zip(args.zip, output_error, json_results, args.zip_type)

    # Exit with the appropriate status code
    if output_error:
        sys.exit(-1)
    else:
        exit(results_detectors)
Example #8
def generate_erc20(contract: Contract, type_property: str,
                   addresses: Addresses):  # pylint: disable=too-many-locals
    """
    Generate the ERC20 tests
    Files generated:
    - interfaces.sol: generic crytic interface
    - Properties[CONTRACTNAME].sol: erc20 properties
    - Test[CONTRACTNAME].sol: Target, its constructor needs to be manually updated
    - If truffle
        - migrations/x_Test[CONTRACTNAME].js
        - test/crytic/InitializationTest[CONTRACTNAME].js: unit tests to check that the contract is correctly configured
        - test/crytic/Test[CONTRACTNAME].js: ERC20 checks
    - echidna_config.yaml: configuration file
    :param addresses:
    :param contract:
    :param type_property: One of ERC20_PROPERTIES.keys()
    :return:
    """
    if contract.compilation_unit.core.crytic_compile is None:
        logging.error("Please compile with crytic-compile")
        return
    if contract.compilation_unit.core.crytic_compile.type not in [
            PlatformType.TRUFFLE,
            PlatformType.SOLC,
    ]:
        logging.error(
            f"{contract.compilation_unit.core.crytic_compile.type} not yet supported by slither-prop"
        )
        return

    # Check if the contract is an ERC20 contract and if the functions have the correct visibility
    errors = _check_compatibility(contract)
    if errors:
        logger.error(red(errors))
        return

    erc_properties = ERC20_PROPERTIES.get(type_property, None)
    if erc_properties is None:
        logger.error(
            f"{type_property} unknown. Types available {ERC20_PROPERTIES.keys()}"
        )
        return
    properties = erc_properties.properties

    # Generate the output directory
    output_dir = _platform_to_output_dir(
        contract.compilation_unit.core.crytic_compile.platform)
    output_dir.mkdir(exist_ok=True)

    # Get the properties
    solidity_properties, unit_tests = _get_properties(contract, properties)

    # Generate the contract containing the properties
    generate_solidity_interface(output_dir, addresses)
    property_file = generate_solidity_properties(contract, type_property,
                                                 solidity_properties,
                                                 output_dir)

    # Generate the Test contract
    initialization_recommendation = _initialization_recommendation(
        type_property)
    contract_filename, contract_name = generate_test_contract(
        contract,
        type_property,
        output_dir,
        property_file,
        initialization_recommendation,
    )

    # Generate Echidna config file
    echidna_config_filename = generate_echidna_config(
        Path(contract.compilation_unit.core.crytic_compile.target).parent,
        addresses)

    unit_test_info = ""

    # If truffle, generate unit tests
    if contract.compilation_unit.core.crytic_compile.type == PlatformType.TRUFFLE:
        unit_test_info = generate_truffle_test(contract, type_property,
                                               unit_tests, addresses)

    logger.info("################################################")
    logger.info(
        green(
            f"Update the constructor in {Path(output_dir, contract_filename)}")
    )

    if unit_test_info:
        logger.info(green(unit_test_info))

    logger.info(green("To run Echidna:"))
    txt = f"\t echidna-test {contract.compilation_unit.core.crytic_compile.target} "
    txt += f"--contract {contract_name} --config {echidna_config_filename}"
    logger.info(green(txt))
Example #9
    def _analyze_contracts(self):
        if not self._contractsNotParsed:
            logger.info(f'No contracts were found in {self.filename}, check that the compilation succeeded')
        if self._analyzed:
            raise Exception('Contract analysis can be run only once!')

        # First we save all the contracts in a dict
        # the key is the contractid
        for contract in self._contractsNotParsed:
            if contract.name in self._contracts:
                if contract.id != self._contracts[contract.name].id:
                    info = 'Slither does not handle projects with contract names re-use'
                    info += '\n{} is defined in:'.format(contract.name)
                    info += '\n- {}\n- {}'.format(contract.source_mapping_str,
                                               self._contracts[contract.name].source_mapping_str)
                    logger.error(info)
                    exit(-1)
            else:
                self._contracts_by_id[contract.id] = contract
                self._contracts[contract.name] = contract

        # Update the inheritance
        for contract in self._contractsNotParsed:
            # remove the first elem in linearizedBaseContracts as it is the contract itself
            ancestors = []
            fathers = []
            father_constructors = []
            try:
                # Resolve linearized base contracts.
                for i in contract.linearizedBaseContracts[1:]:
                    if i in contract.remapping:
                        ancestors.append(self.get_contract_from_name(contract.remapping[i]))
                    else:
                        ancestors.append(self._contracts_by_id[i])

                # Resolve immediate base contracts
                for i in contract.baseContracts:
                    if i in contract.remapping:
                        fathers.append(self.get_contract_from_name(contract.remapping[i]))
                    else:
                        fathers.append(self._contracts_by_id[i])

                # Resolve immediate base constructor calls
                for i in contract.baseConstructorContractsCalled:
                    if i in contract.remapping:
                        father_constructors.append(self.get_contract_from_name(contract.remapping[i]))
                    else:
                        father_constructors.append(self._contracts_by_id[i])

            except KeyError:
                logger.error(red('A contract was not found, it is likely that your codebase contains multiple contracts with the same name'))
                logger.error(red('Truffle does not handle this case during compilation'))
                logger.error(red('Please read https://github.com/trailofbits/slither/wiki#keyerror-or-nonetype-error'))
                logger.error(red('And update your code to remove the duplicate'))
                exit(-1)
            contract.setInheritance(ancestors, fathers, father_constructors)

        contracts_to_be_analyzed = self.contracts

        # Any contract can refer to another contract's enum without needing inheritance
        self._analyze_all_enums(contracts_to_be_analyzed)
        for c in self.contracts:
            c.set_is_analyzed(False)

        libraries = [c for c in contracts_to_be_analyzed if c.contract_kind == 'library']
        contracts_to_be_analyzed = [c for c in contracts_to_be_analyzed if c.contract_kind != 'library']

        # We first parse the struct/variables/functions/contract
        self._analyze_first_part(contracts_to_be_analyzed, libraries)
        for c in self.contracts:
            c.set_is_analyzed(False)

        # We analyze the structs and parse and analyze the events
        # A contract's variables can refer to a struct or an event from any contract
        # (without an inheritance link)
        self._analyze_second_part(contracts_to_be_analyzed, libraries)
        for c in self.contracts:
            c.set_is_analyzed(False)

        # Then we analyse state variables, functions and modifiers
        self._analyze_third_part(contracts_to_be_analyzed, libraries)

        self._analyzed = True

        self._convert_to_slithir()

        compute_dependency(self)
Example #10
def main():
    json_results = {
        'check-initialization': defaultdict(dict),
        'variable-initialization': defaultdict(dict),
        'compare-function-ids': defaultdict(dict),
        'compare-variables-order-implementation': defaultdict(dict),
        'compare-variables-order-proxy': defaultdict(dict),
        'constant_conformance': defaultdict(dict),
        'proxy-present': False,
        'contract_v2-present': False
    }

    args = parse_args()

    v1_filename = vars(args)['contract.sol']

    try:
        v1 = Slither(v1_filename, **vars(args))

        # Analyze logic contract
        v1_name = args.ContractName
        v1_contract = v1.get_contract_from_name(v1_name)
        if v1_contract is None:
            info = 'Contract {} not found in {}'.format(v1_name, v1.filename)
            logger.error(red(info))
            if args.json:
                output_to_json(args.json, str(info),
                               {"upgradeability-check": json_results})
            return

        _checks_on_contract(v1_contract, json_results)

        # Analyze Proxy
        proxy_contract = None
        if args.proxy_name:
            if args.proxy_filename:
                proxy = Slither(args.proxy_filename, **vars(args))
            else:
                proxy = v1

            proxy_contract = proxy.get_contract_from_name(args.proxy_name)
            if proxy_contract is None:
                info = 'Proxy {} not found in {}'.format(
                    args.proxy_name, proxy.filename)
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info),
                                   {"upgradeability-check": json_results})
                return
            json_results['proxy-present'] = True
            _checks_on_contract_and_proxy(v1_contract, proxy_contract,
                                          json_results)

        # Analyze new version
        if args.new_contract_name:
            if args.new_contract_filename:
                v2 = Slither(args.new_contract_filename, **vars(args))
            else:
                v2 = v1

            v2_contract = v2.get_contract_from_name(args.new_contract_name)
            if v2_contract is None:
                info = 'New logic contract {} not found in {}'.format(
                    args.new_contract_name, v2.filename)
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info),
                                   {"upgradeability-check": json_results})
                return
            json_results['contract_v2-present'] = True

            if proxy_contract:
                _checks_on_contract_and_proxy(v2_contract,
                                              proxy_contract,
                                              json_results,
                                              missing_variable_check=False)

            _checks_on_contract_update(v1_contract, v2_contract, json_results)

        if args.json:
            output_to_json(args.json, None,
                           {"upgradeability-check": json_results})

    except SlitherException as e:
        logger.error(str(e))
        if args.json:
            output_to_json(args.json, str(e),
                           {"upgradeability-check": json_results})
        return
Example #11
def parse_args(detector_classes, printer_classes):
    parser = argparse.ArgumentParser(
        description=
        'Slither. For usage information, see https://github.com/crytic/slither/wiki/Usage',
        usage="slither.py contract.sol [flag]")

    parser.add_argument('filename', help='contract.sol')

    parser.add_argument('--version',
                        help='displays the current version',
                        version=require('slither-analyzer')[0].version,
                        action='version')

    group_detector = parser.add_argument_group('Detectors')
    group_printer = parser.add_argument_group('Printers')
    group_solc = parser.add_argument_group('Solc options')
    group_truffle = parser.add_argument_group('Truffle options')
    group_embark = parser.add_argument_group('Embark options')
    group_misc = parser.add_argument_group('Additional option')

    group_detector.add_argument(
        '--detect',
        help='Comma-separated list of detectors, defaults to all, '
        'available detectors: {}'.format(', '.join(d.ARGUMENT
                                                   for d in detector_classes)),
        action='store',
        dest='detectors_to_run',
        default=defaults_flag_in_config['detectors_to_run'])

    group_printer.add_argument(
        '--print',
        help='Comma-separated list of contract information printers, '
        'available printers: {}'.format(', '.join(d.ARGUMENT
                                                  for d in printer_classes)),
        action='store',
        dest='printers_to_run',
        default=defaults_flag_in_config['printers_to_run'])

    group_detector.add_argument('--list-detectors',
                                help='List available detectors',
                                action=ListDetectors,
                                nargs=0,
                                default=False)

    group_printer.add_argument('--list-printers',
                               help='List available printers',
                               action=ListPrinters,
                               nargs=0,
                               default=False)

    group_detector.add_argument(
        '--exclude',
        help='Comma-separated list of detectors that should be excluded',
        action='store',
        dest='detectors_to_exclude',
        default=defaults_flag_in_config['detectors_to_exclude'])

    group_detector.add_argument(
        '--exclude-informational',
        help='Exclude informational impact analyses',
        action='store_true',
        default=defaults_flag_in_config['exclude_informational'])

    group_detector.add_argument('--exclude-low',
                                help='Exclude low impact analyses',
                                action='store_true',
                                default=defaults_flag_in_config['exclude_low'])

    group_detector.add_argument(
        '--exclude-medium',
        help='Exclude medium impact analyses',
        action='store_true',
        default=defaults_flag_in_config['exclude_medium'])

    group_detector.add_argument(
        '--exclude-high',
        help='Exclude high impact analyses',
        action='store_true',
        default=defaults_flag_in_config['exclude_high'])

    group_solc.add_argument('--solc',
                            help='solc path',
                            action='store',
                            default=defaults_flag_in_config['solc'])

    group_solc.add_argument(
        '--solc-args',
        help=
        'Add custom solc arguments. Example: --solc-args "--allow-paths /tmp --evm-version byzantium".',
        action='store',
        default=defaults_flag_in_config['solc_args'])

    group_solc.add_argument(
        '--disable-solc-warnings',
        help='Disable solc warnings',
        action='store_true',
        default=defaults_flag_in_config['disable_solc_warnings'])

    group_solc.add_argument('--solc-ast',
                            help='Provide the ast solc file',
                            action='store_true',
                            default=False)

    group_truffle.add_argument(
        '--truffle-ignore-compile',
        help='Do not run truffle compile',
        action='store_true',
        dest='truffle_ignore_compile',
        default=defaults_flag_in_config['truffle_ignore_compile'])

    group_truffle.add_argument(
        '--truffle-build-directory',
        help='Use an alternative truffle build directory',
        action='store',
        dest='truffle_build_directory',
        default=defaults_flag_in_config['truffle_build_directory'])

    group_truffle.add_argument(
        '--truffle-version',
        help='Use a local Truffle version (with npx)',
        action='store',
        default=defaults_flag_in_config['truffle_version'])

    group_embark.add_argument(
        '--embark-ignore-compile',
        help='Do not run embark build',
        action='store_true',
        dest='embark_ignore_compile',
        default=defaults_flag_in_config['embark_ignore_compile'])

    group_embark.add_argument(
        '--embark-overwrite-config',
        help=
        'Install @trailofbits/embark-contract-export and add it to embark.json',
        action='store_true',
        default=defaults_flag_in_config['embark_overwrite_config'])

    group_misc.add_argument('--json',
                            help='Export results as JSON',
                            action='store',
                            default=defaults_flag_in_config['json'])

    group_misc.add_argument('--disable-color',
                            help='Disable output colorization',
                            action='store_true',
                            default=defaults_flag_in_config['disable_color'])

    group_misc.add_argument(
        '--filter-paths',
        help='Comma-separated list of paths for which results will be excluded',
        action='store',
        dest='filter_paths',
        default=defaults_flag_in_config['filter_paths'])

    group_misc.add_argument(
        '--triage-mode',
        help='Run triage mode (save results in slither.db.json)',
        action='store_true',
        dest='triage_mode',
        default=False)

    group_misc.add_argument(
        '--config-file',
        help='Provide a config file (default: slither.config.json)',
        action='store',
        dest='config_file',
        default='slither.config.json')

    # debugger command
    parser.add_argument('--debug',
                        help=argparse.SUPPRESS,
                        action="store_true",
                        default=False)

    parser.add_argument('--markdown',
                        help=argparse.SUPPRESS,
                        action=OutputMarkdown,
                        default=False)

    group_misc.add_argument('--checklist',
                            help=argparse.SUPPRESS,
                            action='store_true',
                            default=False)

    parser.add_argument('--wiki-detectors',
                        help=argparse.SUPPRESS,
                        action=OutputWiki,
                        default=False)

    parser.add_argument('--list-detectors-json',
                        help=argparse.SUPPRESS,
                        action=ListDetectorsJson,
                        nargs=0,
                        default=False)

    parser.add_argument('--legacy-ast',
                        help=argparse.SUPPRESS,
                        action='store_true',
                        default=defaults_flag_in_config['legacy_ast'])

    parser.add_argument('--ignore-return-value',
                        help=argparse.SUPPRESS,
                        action='store_true',
                        default=False)

    # if the json is splitted in different files
    parser.add_argument('--splitted',
                        help=argparse.SUPPRESS,
                        action='store_true',
                        default=False)

    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    args = parser.parse_args()

    if os.path.isfile(args.config_file):
        try:
            with open(args.config_file) as f:
                config = json.load(f)
                for key, elem in config.items():
                    if key not in defaults_flag_in_config:
                        logger.info(
                            yellow('{} has an unknown key: {} : {}'.format(
                                args.config_file, key, elem)))
                        continue
                    if getattr(args, key) == defaults_flag_in_config[key]:
                        setattr(args, key, elem)
        except json.decoder.JSONDecodeError as e:
            logger.error(
                red('Unable to read {}, please check the file: {}'.format(
                    args.config_file, e)))

    return args
Example #12
def check_initialization(contract):

    results = {
        'Initializable-present': False,
        'Initializable-inherited': False,
        'Initializable.initializer()-present': False,
        'missing-initializer-modifier': [],
        'initialize_target': {},
        'missing-calls': [],
        'multiple-calls': []
    }

    error_found = False

    logger.info(
        green(
            '\n## Run initialization checks... (see https://github.com/crytic/slither/wiki/Upgradeability-Checks#initialization-checks)'
        ))

    # Check if the Initializable contract is present
    initializable = contract.slither.get_contract_from_name('Initializable')
    if initializable is None:
        logger.info(
            yellow(
                'Initializable contract not found, the contract does not follow a standard initialization schema.'
            ))
        return results
    results['Initializable-present'] = True

    # Check if the Initializable contract is inherited
    if initializable not in contract.inheritance:
        logger.info(
            yellow('The logic contract does not call the initializer.'))
        return results
    results['Initializable-inherited'] = True

    # Check if the Initializable.initializer() modifier is present
    initializer = contract.get_modifier_from_canonical_name(
        'Initializable.initializer()')
    if initializer is None:
        logger.info(yellow('Initializable.initializer() does not exist'))
        return results
    results['Initializable.initializer()-present'] = True

    # Check if an init function lacks the initializer modifier
    initializer_modifier_missing = False
    all_init_functions = _get_initialize_functions(contract)
    for f in all_init_functions:
        if initializer not in f.modifiers:
            initializer_modifier_missing = True
            info = f'{f.canonical_name} does not call the initializer modifier'
            logger.info(red(info))
            res = Output(info)
            res.add(f)
            results['missing-initializer-modifier'].append(res.data)

    if not initializer_modifier_missing:
        logger.info(
            green('All the init functions have the initializer modifier'))

    # Check if we can determine the initialize function that will be called
    # TODO: handle MultipleInitTarget
    try:
        most_derived_init = _get_most_derived_init(contract)
    except MultipleInitTarget:
        logger.info(red('Too many init targets'))
        return results

    if most_derived_init is None:
        init_info = f'{contract.name} has no initialize function\n'
        logger.info(green(init_info))
        results['initialize_target'] = {}
        return results
    # results['initialize_target'] is set at the end, as we want to print it last

    # Check if an initialize function is not called from the most_derived_init function
    missing_call = False
    all_init_functions_called = _get_all_internal_calls(most_derived_init) + [
        most_derived_init
    ]
    missing_calls = [
        f for f in all_init_functions if f not in all_init_functions_called
    ]
    for f in missing_calls:
        info = f'Missing call to {f.canonical_name} in {most_derived_init.canonical_name}'
        logger.info(red(info))
        res = Output(info)
        res.add(f, {"is_most_derived_init_function": False})
        res.add(most_derived_init, {"is_most_derived_init_function": True})
        results['missing-calls'].append(res.data)
        missing_call = True
    if not missing_call:
        logger.info(green('No missing call to an init function found'))

    # Check if an init function is called multiple times
    double_calls = list(
        set([
            f for f in all_init_functions_called
            if all_init_functions_called.count(f) > 1
        ]))
    double_calls_found = False
    for f in double_calls:
        info = f'{f.canonical_name} is called multiple times in {most_derived_init.full_name}'
        logger.info(red(info))
        res = Output(info)
        res.add(f)
        results['multiple-calls'].append(res.data)
        double_calls_found = True
    if not double_calls_found:
        logger.info(green('No double call to init functions found'))

    # Print the initialize_target info

    init_info = f'{contract.name} needs to be initialized by {most_derived_init.full_name}\n'
    logger.info(
        green(
            'Check the deployment script to ensure that these functions are called:\n'
            + init_info))
    res = Output(init_info)
    res.add(most_derived_init)
    results['initialize_target'] = res.data

    if not error_found:
        logger.info(green('No error found'))

    return results
Example #13
def compare_variables_order(contract1, contract2, missing_variable_check=True):

    results = {
        'missing_variables': [],
        'different-variables': [],
        'extra-variables': []
    }

    logger.info(
        green(
            f'\n## Run variables ordering checks between {contract1.name} and {contract2.name}... (see https://github.com/crytic/slither/wiki/Upgradeability-Checks#variables-order-checks)'
        ))

    order1 = [
        variable for variable in contract1.state_variables
        if not variable.is_constant
    ]
    order2 = [
        variable for variable in contract2.state_variables
        if not variable.is_constant
    ]

    error_found = False
    idx = 0
    for idx in range(0, len(order1)):
        variable1 = order1[idx]
        if len(order2) <= idx:
            if missing_variable_check:
                info = f'Variable only in {contract1.name}: {variable1.name} ({variable1.source_mapping_str})'
                logger.info(yellow(info))

                res = Output(info)
                res.add(variable1)
                results['missing_variables'].append(res.data)

                error_found = True
            continue

        variable2 = order2[idx]

        if (variable1.name != variable2.name) or (variable1.type != variable2.type):
            info = f'Different variables between {contract1.name} and {contract2.name}:\n'
            info += f'\t Variable {idx} in {contract1.name}: {variable1.name} {variable1.type} ({variable1.source_mapping_str})\n'
            info += f'\t Variable {idx} in {contract2.name}: {variable2.name} {variable2.type} ({variable2.source_mapping_str})\n'
            logger.info(red(info))

            res = Output(info, additional_fields={'index': idx})
            res.add(variable1)
            res.add(variable2)
            results['different-variables'].append(res.data)

            error_found = True

    idx = idx + 1

    while idx < len(order2):
        variable2 = order2[idx]

        info = f'Extra variables in {contract2.name}: {variable2.name} ({variable2.source_mapping_str})\n'
        logger.info(yellow(info))
        res = Output(info, additional_fields={'index': idx})
        res.add(variable2)
        results['extra-variables'].append(res.data)
        idx = idx + 1

    if not error_found:
        logger.info(green('No error found'))

    return results
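A hedged sketch of how the ordering check might be driven across two versions of a logic contract (file and contract names are placeholders):

from slither.slither import Slither  # assumption: standard slither-analyzer package layout

v1 = Slither('TokenV1.sol')
v2 = Slither('TokenV2.sol')
report = compare_variables_order(v1.get_contract_from_name('Token'),
                                 v2.get_contract_from_name('Token'))
if report['different-variables'] or report['missing_variables']:
    print('Storage layout mismatch between the two versions')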
Example #14
def main_impl(all_detector_classes, all_printer_classes):
    """
    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    # Set logger of Slither to info, to catch warnings related to the arg parsing
    logger.setLevel(logging.INFO)
    args = parse_args(all_detector_classes, all_printer_classes)

    cp: Optional[cProfile.Profile] = None
    if args.perf:
        cp = cProfile.Profile()
        cp.enable()

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # Define some variables for potential JSON output
    json_results = {}
    output_error = None
    outputting_json = args.json is not None
    outputting_json_stdout = args.json == "-"
    outputting_sarif = args.sarif is not None
    outputting_sarif_stdout = args.sarif == "-"
    outputting_zip = args.zip is not None
    if args.zip_type not in ZIP_TYPES_ACCEPTED.keys():
        to_log = f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED.keys())}'
        logger.error(to_log)

    # If we are outputting JSON, capture all standard output. If we are outputting to stdout, we block typical stdout
    # output.
    if outputting_json or outputting_sarif:
        StandardOutputCapture.enable(outputting_json_stdout
                                     or outputting_sarif_stdout)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    for (l_name, l_level) in [
        ("Slither", default_log),
        ("Contract", default_log),
        ("Function", default_log),
        ("Node", default_log),
        ("Parsing", default_log),
        ("Detectors", default_log),
        ("FunctionSolc", default_log),
        ("ExpressionParsing", default_log),
        ("TypeParsing", default_log),
        ("SSA_Conversion", default_log),
        ("Printers", default_log),
        # ('CryticCompile', default_log)
    ]:
        logger_level = logging.getLogger(l_name)
        logger_level.setLevel(l_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    console_handler.setFormatter(FormatterCryticCompile())

    crytic_compile_error = logging.getLogger("CryticCompile")
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    results_detectors = []
    results_printers = []
    try:
        filename = args.filename

        # Determine if we are handling ast from solc
        if args.solc_ast or (filename.endswith(".json")
                             and not is_supported(filename)):
            globbed_filenames = glob.glob(filename, recursive=True)
            filenames = glob.glob(os.path.join(filename, "*.json"))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0

            slither_instances = []
            if args.splitted:
                (
                    slither_instance,
                    results_detectors,
                    results_printers,
                    number_contracts,
                ) = process_from_asts(filenames, args, detector_classes,
                                      printer_classes)
                slither_instances.append(slither_instance)
            else:
                for filename in filenames:
                    (
                        slither_instance,
                        results_detectors_tmp,
                        results_printers_tmp,
                        number_contracts_tmp,
                    ) = process_single(filename, args, detector_classes,
                                       printer_classes)
                    number_contracts += number_contracts_tmp
                    results_detectors += results_detectors_tmp
                    results_printers += results_printers_tmp
                    slither_instances.append(slither_instance)

        # Rely on CryticCompile to discern the underlying type of compilations.
        else:
            (
                slither_instances,
                results_detectors,
                results_printers,
                number_contracts,
            ) = process_all(filename, args, detector_classes, printer_classes)

        # Determine if we are outputting JSON
        if outputting_json or outputting_zip or outputting_sarif:
            # Add our compilation information to JSON
            if "compilations" in args.json_types:
                compilation_results = []
                for slither_instance in slither_instances:
                    compilation_results.append(
                        generate_standard_export(
                            slither_instance.crytic_compile))
                json_results["compilations"] = compilation_results

            # Add our detector results to JSON if desired.
            if results_detectors and "detectors" in args.json_types:
                json_results["detectors"] = results_detectors

            # Add our printer results to JSON if desired.
            if results_printers and "printers" in args.json_types:
                json_results["printers"] = results_printers

            # Add our detector types to JSON
            if "list-detectors" in args.json_types:
                detectors, _ = get_detectors_and_printers()
                json_results["list-detectors"] = output_detectors_json(
                    detectors)

            # Add our detector types to JSON
            if "list-printers" in args.json_types:
                _, printers = get_detectors_and_printers()
                json_results["list-printers"] = output_printers_json(printers)

        # Output our results to markdown if we wish to compile a checklist.
        if args.checklist:
            output_results_to_markdown(results_detectors, args.checklist_limit)

        # Don't print the number of results for printers
        if number_contracts == 0:
            logger.warning(red("No contract was analyzed"))
        if printer_classes:
            logger.info("%s analyzed (%d contracts)", filename,
                        number_contracts)
        else:
            logger.info(
                "%s analyzed (%d contracts with %d detectors), %d result(s) found",
                filename,
                number_contracts,
                len(detector_classes),
                len(results_detectors),
            )
        if args.ignore_return_value:
            return

    except SlitherException as slither_exception:
        output_error = str(slither_exception)
        traceback.print_exc()
        logging.error(red("Error:"))
        logging.error(red(output_error))
        logging.error(
            "Please report an issue to https://github.com/crytic/slither/issues"
        )

    except Exception:  # pylint: disable=broad-except
        output_error = traceback.format_exc()
        logging.error(traceback.print_exc())
        logging.error(f"Error in {args.filename}")  # pylint: disable=logging-fstring-interpolation
        logging.error(output_error)

    # If we are outputting JSON, capture the redirected output and disable the redirect to output the final JSON.
    if outputting_json:
        if "console" in args.json_types:
            json_results["console"] = {
                "stdout": StandardOutputCapture.get_stdout_output(),
                "stderr": StandardOutputCapture.get_stderr_output(),
            }
        StandardOutputCapture.disable()
        output_to_json(None if outputting_json_stdout else args.json,
                       output_error, json_results)

    if outputting_sarif:
        StandardOutputCapture.disable()
        output_to_sarif(None if outputting_sarif_stdout else args.sarif,
                        json_results, detector_classes)

    if outputting_zip:
        output_to_zip(args.zip, output_error, json_results, args.zip_type)

    if args.perf:
        cp.disable()
        stats = pstats.Stats(cp).sort_stats("cumtime")
        stats.print_stats()

    # Exit with the appropriate status code
    if output_error:
        sys.exit(-1)
    else:
        my_exit(results_detectors)
Example #15
def parse_args(detector_classes, printer_classes):
    parser = argparse.ArgumentParser(
        description=
        'Slither. For usage information, see https://github.com/crytic/slither/wiki/Usage',
        usage="slither.py contract.sol [flag]")

    parser.add_argument('filename', help='contract.sol')

    cryticparser.init(parser)

    parser.add_argument('--version',
                        help='displays the current version',
                        version=require('slither-analyzer')[0].version,
                        action='version')

    group_detector = parser.add_argument_group('Detectors')
    group_printer = parser.add_argument_group('Printers')
    group_misc = parser.add_argument_group('Additional option')

    group_detector.add_argument(
        '--detect',
        help='Comma-separated list of detectors, defaults to all, '
        'available detectors: {}'.format(', '.join(d.ARGUMENT
                                                   for d in detector_classes)),
        action='store',
        dest='detectors_to_run',
        default=defaults_flag_in_config['detectors_to_run'])

    group_printer.add_argument(
        '--print',
        help='Comma-separated list of contract information printers, '
        'available printers: {}'.format(', '.join(d.ARGUMENT
                                                  for d in printer_classes)),
        action='store',
        dest='printers_to_run',
        default=defaults_flag_in_config['printers_to_run'])

    group_detector.add_argument('--list-detectors',
                                help='List available detectors',
                                action=ListDetectors,
                                nargs=0,
                                default=False)

    group_printer.add_argument('--list-printers',
                               help='List available printers',
                               action=ListPrinters,
                               nargs=0,
                               default=False)

    group_detector.add_argument(
        '--exclude',
        help='Comma-separated list of detectors that should be excluded',
        action='store',
        dest='detectors_to_exclude',
        default=defaults_flag_in_config['detectors_to_exclude'])

    group_detector.add_argument(
        '--exclude-informational',
        help='Exclude informational impact analyses',
        action='store_true',
        default=defaults_flag_in_config['exclude_informational'])

    group_detector.add_argument('--exclude-low',
                                help='Exclude low impact analyses',
                                action='store_true',
                                default=defaults_flag_in_config['exclude_low'])

    group_detector.add_argument(
        '--exclude-medium',
        help='Exclude medium impact analyses',
        action='store_true',
        default=defaults_flag_in_config['exclude_medium'])

    group_detector.add_argument(
        '--exclude-high',
        help='Exclude high impact analyses',
        action='store_true',
        default=defaults_flag_in_config['exclude_high'])

    group_misc.add_argument(
        '--json',
        help=
        'Export the results as a JSON file ("--json -" to export to stdout)',
        action='store',
        default=defaults_flag_in_config['json'])

    group_misc.add_argument('--disable-color',
                            help='Disable output colorization',
                            action='store_true',
                            default=defaults_flag_in_config['disable_color'])

    group_misc.add_argument(
        '--filter-paths',
        help='Comma-separated list of paths for which results will be excluded',
        action='store',
        dest='filter_paths',
        default=defaults_flag_in_config['filter_paths'])

    group_misc.add_argument(
        '--triage-mode',
        help='Run triage mode (save results in slither.db.json)',
        action='store_true',
        dest='triage_mode',
        default=False)

    group_misc.add_argument(
        '--config-file',
        help='Provide a config file (default: slither.config.json)',
        action='store',
        dest='config_file',
        default='slither.config.json')

    group_misc.add_argument('--solc-ast',
                            help='Provide the contract as a json AST',
                            action='store_true',
                            default=False)

    # debugger command
    parser.add_argument('--debug',
                        help=argparse.SUPPRESS,
                        action="store_true",
                        default=False)

    parser.add_argument('--markdown',
                        help=argparse.SUPPRESS,
                        action=OutputMarkdown,
                        default=False)

    group_misc.add_argument('--checklist',
                            help=argparse.SUPPRESS,
                            action='store_true',
                            default=False)

    parser.add_argument('--wiki-detectors',
                        help=argparse.SUPPRESS,
                        action=OutputWiki,
                        default=False)

    parser.add_argument('--list-detectors-json',
                        help=argparse.SUPPRESS,
                        action=ListDetectorsJson,
                        nargs=0,
                        default=False)

    parser.add_argument('--legacy-ast',
                        help=argparse.SUPPRESS,
                        action='store_true',
                        default=defaults_flag_in_config['legacy_ast'])

    parser.add_argument('--ignore-return-value',
                        help=argparse.SUPPRESS,
                        action='store_true',
                        default=defaults_flag_in_config['ignore_return_value'])

    # if the json is split into different files
    parser.add_argument('--splitted',
                        help=argparse.SUPPRESS,
                        action='store_true',
                        default=False)

    if len(sys.argv) == 1:
        parser.print_help(sys.stderr)
        sys.exit(1)

    args = parser.parse_args()

    if os.path.isfile(args.config_file):
        try:
            with open(args.config_file) as f:
                config = json.load(f)
                for key, elem in config.items():
                    if key not in defaults_flag_in_config:
                        logger.info(
                            yellow('{} has an unknown key: {} : {}'.format(
                                args.config_file, key, elem)))
                        continue
                    if getattr(args, key) == defaults_flag_in_config[key]:
                        setattr(args, key, elem)
        except json.decoder.JSONDecodeError as e:
            logger.error(
                red('Unable to read {}, please check the file: {}'.format(
                    args.config_file, e)))

    return args
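
The config loader above only overrides an argument when its command-line value still equals the default, and it warns on keys it does not recognize. Below is a minimal sketch of a slither.config.json this loader would accept, assuming the keys mirror the argparse dest names checked against defaults_flag_in_config; the concrete values are hypothetical.

import json

# Hypothetical config: keys must match entries of defaults_flag_in_config,
# i.e. the dest names used by the argparse options above.
example_config = {
    "detectors_to_exclude": "naming-convention",  # assumed detector name
    "filter_paths": "node_modules",
    "disable_color": True,
}

with open("slither.config.json", "w") as config_file:
    json.dump(example_config, config_file, indent=2)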
Example No. 16
0
def check_initialization(s):

    initializable = s.get_contract_from_name('Initializable')

    if initializable is None:
        logger.info(
            yellow(
                'Initializable contract not found; the contract does not follow a standard initialization schema.'
            ))
        return

    initializer = initializable.get_modifier_from_signature('initializer()')

    init_info = ''

    double_calls_found = False
    missing_call = False
    initializer_modifier_missing = False

    for contract in s.contracts:
        if initializable in contract.inheritance:
            all_init_functions = _get_initialize_functions(contract)
            for f in all_init_functions:
                if initializer not in f.modifiers:
                    initializer_modifier_missing = True
                    logger.info(
                        red(f'{f.contract.name}.{f.name} does not use the initializer modifier'
                            ))
            most_derived_init = _get_most_derived_init(contract)
            if most_derived_init is None:
                init_info += f'{contract.name} has no initialize function\n'
                continue
            init_info += f'{contract.name} needs to be initialized by {most_derived_init.full_name}\n'
            all_init_functions_called = _get_all_internal_calls(
                most_derived_init) + [most_derived_init]
            missing_calls = [
                f for f in all_init_functions
                if f not in all_init_functions_called
            ]
            for f in missing_calls:
                logger.info(
                    red(f'Missing call to {f.contract.name}.{f.name} in {contract.name}'
                        ))
                missing_call = True
            double_calls = list(
                set([
                    f for f in all_init_functions_called
                    if all_init_functions_called.count(f) > 1
                ]))
            for f in double_calls:
                logger.info(
                    red(f'{f.contract.name}.{f.full_name} is called multiple times in {contract.name}'
                        ))
                double_calls_found = True

    if not initializer_modifier_missing:
        logger.info(
            green('All the init functions have the initializer modifier'))

    if not double_calls_found:
        logger.info(green('No double call to init functions found'))

    if not missing_call:
        logger.info(green('No missing call to an init function found'))

    logger.info(
        green(
            'Check the deployment script to ensure that these functions are called:\n'
            + init_info))
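
check_initialization only needs a Slither object; the Initializable lookup and the modifier/call checks are all derived from it. A minimal, hypothetical invocation follows, assuming the target file path and a basic logging setup so the module-level logger output is visible.

import logging

from slither import Slither

logging.basicConfig(level=logging.INFO)  # make the logger.info findings visible

slither_instance = Slither("contracts/MyToken.sol")  # hypothetical compilation target
check_initialization(slither_instance)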
Example No. 17
0
def main():
    json_results = {
        'proxy-present': False,
        'contract_v2-present': False,
        'detectors': []
    }

    args = parse_args()

    v1_filename = vars(args)['contract.sol']
    number_detectors_run = 0
    detectors = _get_checks()
    try:
        v1 = Slither(v1_filename, **vars(args))

        # Analyze logic contract
        v1_name = args.ContractName
        v1_contract = v1.get_contract_from_name(v1_name)
        if v1_contract is None:
            info = 'Contract {} not found in {}'.format(v1_name, v1.filename)
            logger.error(red(info))
            if args.json:
                output_to_json(args.json, str(info), json_results)
            return

        detectors_results, number_detectors = _checks_on_contract(
            detectors, v1_contract)
        json_results['detectors'] += detectors_results
        number_detectors_run += number_detectors

        # Analyze Proxy
        proxy_contract = None
        if args.proxy_name:
            if args.proxy_filename:
                proxy = Slither(args.proxy_filename, **vars(args))
            else:
                proxy = v1

            proxy_contract = proxy.get_contract_from_name(args.proxy_name)
            if proxy_contract is None:
                info = 'Proxy {} not found in {}'.format(
                    args.proxy_name, proxy.filename)
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info), json_results)
                return
            json_results['proxy-present'] = True

            detectors_results, number_detectors = _checks_on_contract_and_proxy(
                detectors, v1_contract, proxy_contract)
            json_results['detectors'] += detectors_results
            number_detectors_run += number_detectors
        # Analyze new version
        if args.new_contract_name:
            if args.new_contract_filename:
                v2 = Slither(args.new_contract_filename, **vars(args))
            else:
                v2 = v1

            v2_contract = v2.get_contract_from_name(args.new_contract_name)
            if v2_contract is None:
                info = 'New logic contract {} not found in {}'.format(
                    args.new_contract_name, v2.filename)
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info), json_results)
                return
            json_results['contract_v2-present'] = True

            if proxy_contract:
                detectors_results, _ = _checks_on_contract_and_proxy(
                    detectors, v2_contract, proxy_contract)

                json_results['detectors'] += detectors_results

            detectors_results, number_detectors = _checks_on_contract_update(
                detectors, v1_contract, v2_contract)
            json_results['detectors'] += detectors_results
            number_detectors_run += number_detectors

            # If there is a V2, we run the contract-only check on the V2
            detectors_results, number_detectors = _checks_on_contract(
                detectors, v2_contract)
            json_results['detectors'] += detectors_results
            number_detectors_run += number_detectors

        logger.info(
            f'{len(json_results["detectors"])} findings, {number_detectors_run} detectors run'
        )
        if args.json:
            output_to_json(args.json, None, json_results)

    except SlitherException as e:
        logger.error(str(e))
        if args.json:
            output_to_json(args.json, str(e), json_results)
        return
Example No. 18
0
def main():
    json_results = {
        "proxy-present": False,
        "contract_v2-present": False,
        "detectors": [],
    }

    args = parse_args()

    v1_filename = vars(args)["contract.sol"]
    number_detectors_run = 0
    detectors = _get_checks()
    try:
        variable1 = Slither(v1_filename, **vars(args))

        # Analyze logic contract
        v1_name = args.ContractName
        v1_contracts = variable1.get_contract_from_name(v1_name)
        if len(v1_contracts) != 1:
            info = f"Contract {v1_name} not found in {variable1.filename}"
            logger.error(red(info))
            if args.json:
                output_to_json(args.json, str(info), json_results)
            return
        v1_contract = v1_contracts[0]

        detectors_results, number_detectors = _checks_on_contract(
            detectors, v1_contract)
        json_results["detectors"] += detectors_results
        number_detectors_run += number_detectors

        # Analyze Proxy
        proxy_contract = None
        if args.proxy_name:
            if args.proxy_filename:
                proxy = Slither(args.proxy_filename, **vars(args))
            else:
                proxy = variable1

            proxy_contracts = proxy.get_contract_from_name(args.proxy_name)
            if len(proxy_contracts) != 1:
                info = f"Proxy {args.proxy_name} not found in {proxy.filename}"
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info), json_results)
                return
            proxy_contract = proxy_contracts[0]
            json_results["proxy-present"] = True

            detectors_results, number_detectors = _checks_on_contract_and_proxy(
                detectors, v1_contract, proxy_contract)
            json_results["detectors"] += detectors_results
            number_detectors_run += number_detectors
        # Analyze new version
        if args.new_contract_name:
            if args.new_contract_filename:
                variable2 = Slither(args.new_contract_filename, **vars(args))
            else:
                variable2 = variable1

            v2_contracts = variable2.get_contract_from_name(
                args.new_contract_name)
            if len(v2_contracts) != 1:
                info = (
                    f"New logic contract {args.new_contract_name} not found in {variable2.filename}"
                )
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info), json_results)
                return
            v2_contract = v2_contracts[0]
            json_results["contract_v2-present"] = True

            if proxy_contract:
                detectors_results, _ = _checks_on_contract_and_proxy(
                    detectors, v2_contract, proxy_contract)

                json_results["detectors"] += detectors_results

            detectors_results, number_detectors = _checks_on_contract_update(
                detectors, v1_contract, v2_contract)
            json_results["detectors"] += detectors_results
            number_detectors_run += number_detectors

            # If there is a V2, we run the contract-only check on the V2
            detectors_results, number_detectors = _checks_on_contract(
                detectors, v2_contract)
            json_results["detectors"] += detectors_results
            number_detectors_run += number_detectors

        to_log = f'{len(json_results["detectors"])} findings, {number_detectors_run} detectors run'
        logger.info(to_log)
        if args.json:
            output_to_json(args.json, None, json_results)

    except SlitherException as slither_exception:
        logger.error(str(slither_exception))
        if args.json:
            output_to_json(args.json, str(slither_exception), json_results)
        return
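
Examples No. 17 and No. 18 differ mainly in the get_contract_from_name API: the older variant returns a single contract or None, while the newer one returns a list of matches. A small hedged helper that tolerates both shapes is sketched below; the function name is an assumption, not part of either example.

def get_single_contract(slither_obj, name):
    """Return the single matching contract, or None (handles both API shapes)."""
    found = slither_obj.get_contract_from_name(name)
    if isinstance(found, list):  # newer API: list of matching contracts
        return found[0] if len(found) == 1 else None
    return found  # older API: a contract instance or None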
Example No. 19
0
    def output(self, _filename):
        """
        _filename is not used
        Args:
            _filename(string)
        """

        txt = ""
        if not self.slither.crytic_compile:
            txt = "The EVM printer requires to compile with crytic-compile"
            self.info(red(txt))
            res = self.generate_output(txt)
            return res
        evm_info = _extract_evm_info(self.slither)

        for contract in self.slither.contracts_derived:
            txt += blue("Contract {}\n".format(contract.name))

            contract_file = self.slither.source_code[
                contract.source_mapping["filename_absolute"]
            ].encode("utf-8")
            with open(
                contract.source_mapping["filename_absolute"], "r", encoding="utf-8"
            ) as source_file:
                contract_file_lines = source_file.readlines()

            contract_pcs = {}
            contract_cfg = {}

            for function in contract.functions:
                txt += blue(f"\tFunction {function.canonical_name}\n")

                # CFG and source mapping depend on function being constructor or not
                if function.is_constructor:
                    contract_cfg = evm_info["cfg_init", contract.name]
                    contract_pcs = evm_info["mapping_init", contract.name]
                else:
                    contract_cfg = evm_info["cfg", contract.name]
                    contract_pcs = evm_info["mapping", contract.name]

                for node in function.nodes:
                    txt += green("\t\tNode: " + str(node) + "\n")
                    node_source_line = (
                        contract_file[0 : node.source_mapping["start"]].count("\n".encode("utf-8"))
                        + 1
                    )
                    txt += green(
                        "\t\tSource line {}: {}\n".format(
                            node_source_line,
                            contract_file_lines[node_source_line - 1].rstrip(),
                        )
                    )
                    txt += magenta("\t\tEVM Instructions:\n")
                    node_pcs = contract_pcs.get(node_source_line, [])
                    for pc in node_pcs:
                        txt += magenta(
                            "\t\t\t0x{:x}: {}\n".format(
                                int(pc), contract_cfg.get_instruction_at(pc)
                            )
                        )

            for modifier in contract.modifiers:
                txt += blue(f"\tModifier {modifier.canonical_name}\n")
                for node in modifier.nodes:
                    txt += green("\t\tNode: " + str(node) + "\n")
                    node_source_line = (
                        contract_file[0 : node.source_mapping["start"]].count("\n".encode("utf-8"))
                        + 1
                    )
                    txt += green(
                        "\t\tSource line {}: {}\n".format(
                            node_source_line,
                            contract_file_lines[node_source_line - 1].rstrip(),
                        )
                    )
                    txt += magenta("\t\tEVM Instructions:\n")
                    node_pcs = contract_pcs.get(node_source_line, [])
                    for pc in node_pcs:
                        txt += magenta(
                            "\t\t\t0x{:x}: {}\n".format(
                                int(pc), contract_cfg.get_instruction_at(pc)
                            )
                        )

        self.info(txt)
        res = self.generate_output(txt)
        return res
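
The printer converts a source-mapping byte offset into a line number by counting the newlines that precede it in the encoded source. A minimal sketch of that mapping in isolation follows; the sample bytes are made up.

def offset_to_line(source_bytes: bytes, offset: int) -> int:
    # Line numbers are 1-based: newlines before the offset, plus one.
    return source_bytes[:offset].count(b"\n") + 1

sample = b"pragma solidity ^0.5.0;\ncontract C {}\n"
assert offset_to_line(sample, 0) == 1   # start of line 1
assert offset_to_line(sample, 30) == 2  # inside "contract C {}"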
Example No. 20
0
def main_impl(all_detector_classes, all_printer_classes):
    """
    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    args = parse_args(all_detector_classes, all_printer_classes)

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # If we are outputting json to stdout, we'll want to disable any logging.
    stdout_json = args.json == "-"
    if stdout_json:
        logging.disable(logging.CRITICAL)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    default_log = logging.INFO if not args.debug else logging.DEBUG

    for (l_name, l_level) in [
        ('Slither', default_log),
        ('Contract', default_log),
        ('Function', default_log),
        ('Node', default_log),
        ('Parsing', default_log),
        ('Detectors', default_log),
        ('FunctionSolc', default_log),
        ('ExpressionParsing', default_log),
        ('TypeParsing', default_log),
        ('SSA_Conversion', default_log),
        ('Printers', default_log),
            #('CryticCompile', default_log)
    ]:
        logger_instance = logging.getLogger(l_name)
        logger_instance.setLevel(l_level)

    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)

    console_handler.setFormatter(FormatterCryticCompile())

    crytic_compile_error = logging.getLogger('CryticCompile')
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    try:
        filename = args.filename

        globbed_filenames = glob.glob(filename, recursive=True)

        if os.path.isfile(filename) or is_supported(filename):
            (results, number_contracts) = process(filename, args,
                                                  detector_classes,
                                                  printer_classes)

        elif os.path.isdir(filename) or len(globbed_filenames) > 0:
            extension = "*.sol" if not args.solc_ast else "*.json"
            filenames = glob.glob(os.path.join(filename, extension))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0
            results = []
            if args.splitted and args.solc_ast:
                (results,
                 number_contracts) = process_files(filenames, args,
                                                   detector_classes,
                                                   printer_classes)
            else:
                for filename in filenames:
                    (results_tmp,
                     number_contracts_tmp) = process(filename, args,
                                                     detector_classes,
                                                     printer_classes)
                    number_contracts += number_contracts_tmp
                    results += results_tmp

        else:
            raise Exception("Unrecognised file/dir path: '#{filename}'".format(
                filename=filename))

        if args.json:
            output_json(results, None if stdout_json else args.json)
        if args.checklist:
            output_results_to_markdown(results)
        # Don't print the number of results for printers
        if number_contracts == 0:
            logger.warning(red('No contract was analyzed'))
        if printer_classes:
            logger.info('%s analyzed (%d contracts)', filename,
                        number_contracts)
        else:
            logger.info('%s analyzed (%d contracts), %d result(s) found',
                        filename, number_contracts, len(results))
        if args.ignore_return_value:
            return
        exit(results)

    except SlitherException as se:
        # Output our error accordingly, via JSON or logging.
        if stdout_json:
            print(json.dumps(wrap_json_detectors_results(False, str(se), [])))
        else:
            logging.error(red('Error:'))
            logging.error(red(se))
            logging.error(
                'Please report an issue to https://github.com/crytic/slither/issues'
            )
        sys.exit(-1)

    except Exception:
        # Output our error accordingly, via JSON or logging.
        if stdout_json:
            print(
                json.dumps(
                    wrap_json_detectors_results(False, traceback.format_exc(),
                                                [])))
        else:
            logging.error('Error in %s' % args.filename)
            logging.error(traceback.format_exc())
        sys.exit(-1)
Example No. 21
0
def constant_conformance_check(contract_v1, contract_v2):

    results = {
        "became_constants": [],
        "were_constants": [],
        "not_found_in_v2": [],
    }

    logger.info(
        green(
            '\n## Run variable constants conformance check... (see https://github.com/crytic/slither/wiki/Upgradeability-Checks)'
        ))
    error_found = False

    state_variables_v1 = contract_v1.state_variables
    state_variables_v2 = contract_v2.state_variables

    v2_additional_variables = len(state_variables_v2) - len(state_variables_v1)
    if v2_additional_variables < 0:
        v2_additional_variables = 0

    # We keep two indexes, because they need to go out of sync if v2
    # has additional non-constant variables
    idx_v1 = 0
    idx_v2 = 0
    while idx_v1 < len(state_variables_v1):

        state_v1 = contract_v1.state_variables[idx_v1]
        if len(state_variables_v2) <= idx_v2:
            break

        state_v2 = contract_v2.state_variables[idx_v2]

        if state_v2:
            if state_v1.is_constant:
                if not state_v2.is_constant:

                    # If v2 has additional non-constant variables, we need to skip them
                    if (state_v1.name != state_v2.name or state_v1.type !=
                            state_v2.type) and v2_additional_variables > 0:
                        v2_additional_variables -= 1
                        idx_v2 += 1
                        continue

                    info = f'{state_v1.canonical_name} ({state_v1.source_mapping_str}) was constant and {state_v2.canonical_name} is not ({state_v2.source_mapping_str})'
                    logger.info(red(info))

                    res = Output(info)
                    res.add(state_v1)
                    res.add(state_v2)
                    results['were_constants'].append(res.data)
                    error_found = True

            elif state_v2.is_constant:
                info = f'{state_v1.canonical_name} ({state_v1.source_mapping_str}) was not constant but {state_v2.canonical_name} is ({state_v2.source_mapping_str})'
                logger.info(red(info))

                res = Output(info)
                res.add(state_v1)
                res.add(state_v2)
                results['became_constants'].append(res.data)
                error_found = True

        else:
            info = f'{state_v1.canonical_name} not found in {contract_v2.name}, no check was done'
            logger.info(yellow(info))

            res = Output(info)
            res.add(state_v1)
            res.add(contract_v2)
            results['not_found_in_v2'].append(res.data)

            error_found = True

        idx_v1 += 1
        idx_v2 += 1

    if not error_found:
        logger.info(green('No error found'))

    return results
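
The two-index walk above lets the v2 index run ahead of the v1 index whenever v2 has inserted additional variables, so constants are still compared against their intended counterparts. Below is a simplified, list-based analogue of that skipping logic; the variable names are made up, and the real check only skips on a name/type mismatch for constants.

v1_vars = ["owner", "MAX_SUPPLY", "balance"]
v2_vars = ["owner", "paused", "MAX_SUPPLY", "balance"]  # one variable inserted in v2

extra = max(0, len(v2_vars) - len(v1_vars))
idx_v1 = idx_v2 = 0
pairs = []
while idx_v1 < len(v1_vars) and idx_v2 < len(v2_vars):
    if v1_vars[idx_v1] != v2_vars[idx_v2] and extra > 0:
        # Mismatch explained by an extra v2 variable: advance the v2 index only.
        extra -= 1
        idx_v2 += 1
        continue
    pairs.append((v1_vars[idx_v1], v2_vars[idx_v2]))
    idx_v1 += 1
    idx_v2 += 1

print(pairs)  # [('owner', 'owner'), ('MAX_SUPPLY', 'MAX_SUPPLY'), ('balance', 'balance')]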