def format(self, record):
    """Colorize compiler diagnostics before delegating to the parent formatter.

    For records whose message starts with "Compilation warnings/errors on ",
    the compiler output held in ``record.args[1]`` is rewritten so that each
    line containing "Error" is wrapped in red.
    """
    if record.msg.startswith("Compilation warnings/errors on "):
        # args[0] is the target name, args[1] the raw compiler output.
        colored = [
            red(line) if "Error" in line else line
            for line in record.args[1].split("\n")
        ]
        record.args = (record.args[0], "\n".join(colored))
    return super().format(record)
def get_summary_erc20(self, contract):
    """Return a colored, newline-terminated summary of ERC20 traits.

    One line per finding: pausability, minting capability, and whether the
    approve race condition is mitigated.
    """
    pause, mint_unlimited, race_condition_mitigated = self._get_summary_erc20(
        contract)

    findings = []
    if pause:
        findings.append(yellow("Pausable"))
    if mint_unlimited is None:
        findings.append(green("No Minting"))
    elif mint_unlimited:
        findings.append(red("∞ Minting"))
    else:
        findings.append(yellow("Minting"))
    if not race_condition_mitigated:
        findings.append(red("Approve Race Cond."))

    # Each finding is followed by a newline, matching line-per-entry output.
    return "".join(entry + "\n" for entry in findings)
def is_complex_code(self, contract):
    """
    Check if the code is complex
    Heuristic, the code is complex if:
    - One function has a cyclomatic complexity > 7
    Args:
        contract
    Returns:
        str: colored "Yes" (red) or "No" (green)
    """
    if self._is_complex_code(contract):
        return red("Yes")
    return green("No")
def load_previous_results(self):
    """Load the previous run's results from disk and index their ids.

    Populates ``self._previous_results`` and ``self._previous_results_ids``;
    a corrupted JSON file is reported but does not raise.
    """
    filename = self._previous_results_filename
    try:
        if os.path.isfile(filename):
            with open(filename) as results_file:
                self._previous_results = json.load(results_file)
            if self._previous_results:
                # Only entries that carry an "id" are indexed.
                self._previous_results_ids.update(
                    entry["id"] for entry in self._previous_results
                    if "id" in entry
                )
    except json.decoder.JSONDecodeError:
        logger.error(
            red("Impossible to decode {}. Consider removing the file".
                format(filename)))
def main():
    """List every function and call path that can reach the target functions.

    Usage: python3 possible_paths.py filename targets
    Example: python3 possible_paths.py contract.sol contract1.function1 contract2.function2
    """
    # Parse all arguments
    args = parse_args()

    # Perform fortress analysis on the given filename
    fortress = Fortress(args.filename, **vars(args))

    try:
        targets = resolve_functions(fortress, args.targets)
    except ResolveFunctionException as resolvefunction:
        print(red(resolvefunction))
        sys.exit(-1)

    # Print out all target functions.
    print("Target functions:")
    for target in targets:
        print(f"- {target.contract_declarer.name}.{target.full_name}")
    print("\n")

    # Obtain all paths which reach the target functions.
    reaching_paths = find_target_paths(fortress, targets)
    reaching_functions = {
        func
        for path in reaching_paths
        for func in path
        if func not in targets
    }

    # Print out all function names which can reach the targets.
    print("The following functions reach the specified targets:")
    for name in sorted(f"{func.canonical_name}" for func in reaching_functions):
        print(f"- {name}")
    print("\n")

    # Format all function paths.
    formatted_paths = [
        " -> ".join(f"{func.canonical_name}" for func in path)
        for path in reaching_paths
    ]

    # Print a sorted list of all function paths which can reach the targets.
    print("The following paths reach the specified targets:")
    for formatted_path in sorted(formatted_paths):
        print(f"{formatted_path}\n")
def read_config_file(args):
    """Overlay config-file values onto *args* for options still at defaults.

    Unknown keys are logged and skipped; command-line values always win over
    config-file values. A malformed JSON file is reported but does not raise.
    """
    if not os.path.isfile(args.config_file):
        return
    try:
        with open(args.config_file) as config_fp:
            config = json.load(config_fp)
        for key, elem in config.items():
            if key not in defaults_flag_in_config:
                logger.info(
                    yellow("{} has an unknown key: {} : {}".format(
                        args.config_file, key, elem)))
                continue
            # Apply the config value only when the option was left at its
            # default, so explicit command-line flags take precedence.
            if getattr(args, key) == defaults_flag_in_config[key]:
                setattr(args, key, elem)
    except json.decoder.JSONDecodeError as e:
        logger.error(
            red("Impossible to read {}, please check the file {}".format(
                args.config_file, e)))
def _check(self):
    """Report initializer functions never called by the most derived init.

    Returns:
        list: one generated result per missing call; empty when the contract
        has no init target, or resolution hit multiple targets.
    """
    results = []
    # TODO: handle MultipleInitTarget
    try:
        most_derived_init = _get_most_derived_init(self.contract)
    except MultipleInitTarget:
        logger.error(red(f"Too many init targets in {self.contract}"))
        return []
    if most_derived_init is None:
        return []
    all_init_functions = _get_initialize_functions(self.contract)
    # The derived init itself counts as "called".
    all_init_functions_called = _get_all_internal_calls(
        most_derived_init) + [most_derived_init]
    # Idiomatic `not in` (was `not f in`).
    missing_calls = [
        f for f in all_init_functions if f not in all_init_functions_called
    ]
    for f in missing_calls:
        info = ["Missing call to ", f, " in ", most_derived_init, ".\n"]
        # Renamed from `json`: the original local shadowed the json module.
        result = self.generate_result(info)
        results.append(result)
    return results
def get_detectors_result(
        self) -> Tuple[str, List[Dict], int, int, int, int, int]:
    """Summarize detector findings with per-severity color-coded counts.

    Returns:
        tuple: (summary text, all results, optimization, informational,
        low, medium, high counts).
    """
    (
        all_results,
        optimization,
        informational,
        low,
        medium,
        high,
    ) = self._get_detectors_result()

    lines = [
        "Number of optimization issues: {}\n".format(green(optimization)),
        "Number of informational issues: {}\n".format(green(informational)),
        "Number of low issues: {}\n".format(green(low)),
    ]

    # Medium and high counts are highlighted only when non-zero.
    medium_color = yellow if medium > 0 else green
    lines.append("Number of medium issues: {}\n".format(medium_color(medium)))

    # NOTE(review): the zero-high branch emits a double newline while the
    # non-zero branch emits a single one — preserved as-is; confirm intent.
    if high > 0:
        lines.append("Number of high issues: {}\n".format(red(high)))
    else:
        lines.append("Number of high issues: {}\n\n".format(green(high)))

    txt = "".join(lines)
    return txt, all_results, optimization, informational, low, medium, high
def output(self, _filename):
    """Print, per contract, each function/modifier node with its source line
    and the EVM instructions mapped to that line.

    _filename is not used

    Args:
        _filename(string)
    """
    txt = ""
    if not self.fortress.crytic_compile:
        txt = "The EVM printer requires to compile with crytic-compile"
        self.info(red(txt))
        res = self.generate_output(txt)
        return res
    evm_info = _extract_evm_info(self.fortress)

    def _render_nodes(nodes, contract_file, contract_file_lines,
                      contract_cfg, contract_pcs):
        # Render each CFG node: the node itself, its source line, and the
        # EVM instructions whose pc maps to that line. Shared by the
        # function and modifier loops below (was duplicated inline).
        rendered = ""
        for node in nodes:
            rendered += green("\t\tNode: " + str(node) + "\n")
            # Count newlines up to the node's start offset to get its
            # 1-based source line.
            node_source_line = (
                contract_file[0:node.source_mapping["start"]].count(
                    "\n".encode("utf-8")) + 1)
            rendered += green("\t\tSource line {}: {}\n".format(
                node_source_line,
                contract_file_lines[node_source_line - 1].rstrip(),
            ))
            rendered += magenta("\t\tEVM Instructions:\n")
            for pc in contract_pcs.get(node_source_line, []):
                rendered += magenta("\t\t\t0x{:x}: {}\n".format(
                    int(pc), contract_cfg.get_instruction_at(pc)))
        return rendered

    for contract in self.fortress.contracts_derived:
        txt += blue("Contract {}\n".format(contract.name))
        contract_file = self.fortress.source_code[
            contract.source_mapping["filename_absolute"]].encode("utf-8")
        # Context manager so the source file is always closed (the original
        # leaked the handle via open(...).readlines()).
        with open(contract.source_mapping["filename_absolute"], "r") as src:
            contract_file_lines = src.readlines()

        contract_pcs = {}
        contract_cfg = {}

        for function in contract.functions:
            txt += blue(f"\tFunction {function.canonical_name}\n")
            # CFG and source mapping depend on function being constructor or not
            if function.is_constructor:
                contract_cfg = evm_info["cfg_init", contract.name]
                contract_pcs = evm_info["mapping_init", contract.name]
            else:
                contract_cfg = evm_info["cfg", contract.name]
                contract_pcs = evm_info["mapping", contract.name]
            txt += _render_nodes(function.nodes, contract_file,
                                 contract_file_lines, contract_cfg,
                                 contract_pcs)

        for modifier in contract.modifiers:
            txt += blue(f"\tModifier {modifier.canonical_name}\n")
            # NOTE(review): modifiers reuse the cfg/pc mapping selected by
            # the last function iterated above — preserved from the original;
            # confirm this is intentional.
            txt += _render_nodes(modifier.nodes, contract_file,
                                 contract_file_lines, contract_cfg,
                                 contract_pcs)

    self.info(txt)
    res = self.generate_output(txt)
    return res
def main():
    """Entry point for the upgradeability checks.

    Analyzes the v1 logic contract, then (optionally) the proxy and the v2
    logic contract, accumulating detector findings into ``json_results`` and
    emitting them as JSON when requested. Any lookup failure logs an error,
    optionally writes JSON, and aborts early.
    """
    json_results = {
        "proxy-present": False,
        "contract_v2-present": False,
        "detectors": [],
    }

    args = parse_args()

    # argparse stores the positional under its literal name "contract.sol".
    v1_filename = vars(args)["contract.sol"]
    number_detectors_run = 0
    detectors = _get_checks()
    try:
        variable1 = Fortress(v1_filename, **vars(args))

        # Analyze logic contract
        v1_name = args.ContractName
        v1_contract = variable1.get_contract_from_name(v1_name)
        if v1_contract is None:
            info = "Contract {} not found in {}".format(
                v1_name, variable1.filename)
            logger.error(red(info))
            if args.json:
                output_to_json(args.json, str(info), json_results)
            return

        detectors_results, number_detectors = _checks_on_contract(
            detectors, v1_contract)
        json_results["detectors"] += detectors_results
        number_detectors_run += number_detectors

        # Analyze Proxy
        proxy_contract = None
        if args.proxy_name:
            # The proxy may live in its own file or alongside the v1 contract.
            if args.proxy_filename:
                proxy = Fortress(args.proxy_filename, **vars(args))
            else:
                proxy = variable1

            proxy_contract = proxy.get_contract_from_name(args.proxy_name)
            if proxy_contract is None:
                info = "Proxy {} not found in {}".format(
                    args.proxy_name, proxy.filename)
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info), json_results)
                return
            json_results["proxy-present"] = True

            detectors_results, number_detectors = _checks_on_contract_and_proxy(
                detectors, v1_contract, proxy_contract)
            json_results["detectors"] += detectors_results
            number_detectors_run += number_detectors

        # Analyze new version
        if args.new_contract_name:
            # Same pattern as the proxy: separate file or the v1 compilation.
            if args.new_contract_filename:
                variable2 = Fortress(args.new_contract_filename, **vars(args))
            else:
                variable2 = variable1

            v2_contract = variable2.get_contract_from_name(
                args.new_contract_name)
            if v2_contract is None:
                info = "New logic contract {} not found in {}".format(
                    args.new_contract_name, variable2.filename)
                logger.error(red(info))
                if args.json:
                    output_to_json(args.json, str(info), json_results)
                return
            json_results["contract_v2-present"] = True

            if proxy_contract:
                detectors_results, _ = _checks_on_contract_and_proxy(
                    detectors, v2_contract, proxy_contract)
                json_results["detectors"] += detectors_results

            detectors_results, number_detectors = _checks_on_contract_update(
                detectors, v1_contract, v2_contract)
            json_results["detectors"] += detectors_results
            number_detectors_run += number_detectors

            # If there is a V2, we run the contract-only check on the V2
            detectors_results, _ = _checks_on_contract(detectors, v2_contract)
            json_results["detectors"] += detectors_results
            # NOTE(review): this re-adds `number_detectors` from the update
            # check above (the count from _checks_on_contract was discarded
            # as `_`) — looks like a possible double-count; confirm intent.
            number_detectors_run += number_detectors

        to_log = f'{len(json_results["detectors"])} findings, {number_detectors_run} detectors run'
        logger.info(to_log)

        if args.json:
            output_to_json(args.json, None, json_results)

    except FortressException as fortress_exception:
        logger.error(str(fortress_exception))
        if args.json:
            output_to_json(args.json, str(fortress_exception), json_results)
        return
def main_impl(all_detector_classes, all_printer_classes):
    """
    Run the full CLI pipeline: parse args, configure logging/colorization,
    compile and analyze the target(s), then emit results (console, JSON,
    zip, markdown checklist) and exit with the appropriate status code.

    :param all_detector_classes: A list of all detectors that can be included/excluded.
    :param all_printer_classes: A list of all printers that can be included.
    """
    # Set logger of Fortress to info, to catch warnings related to the arg parsing
    logger.setLevel(logging.INFO)
    args = parse_args(all_detector_classes, all_printer_classes)

    # Optional profiling of the whole run; reported at the end.
    cp: Optional[cProfile.Profile] = None
    if args.perf:
        cp = cProfile.Profile()
        cp.enable()

    # Set colorization option
    set_colorization_enabled(not args.disable_color)

    # Define some variables for potential JSON output
    json_results = {}
    output_error = None
    outputting_json = args.json is not None
    outputting_json_stdout = args.json == "-"
    outputting_zip = args.zip is not None
    if args.zip_type not in ZIP_TYPES_ACCEPTED.keys():
        to_log = f'Zip type not accepted, it must be one of {",".join(ZIP_TYPES_ACCEPTED.keys())}'
        logger.error(to_log)

    # If we are outputting JSON, capture all standard output. If we are outputting to stdout, we block typical stdout
    # output.
    if outputting_json:
        StandardOutputCapture.enable(outputting_json_stdout)

    printer_classes = choose_printers(args, all_printer_classes)
    detector_classes = choose_detectors(args, all_detector_classes)

    # Propagate the debug flag to every internal logger.
    default_log = logging.INFO if not args.debug else logging.DEBUG
    for (l_name, l_level) in [
        ("Fortress", default_log),
        ("Contract", default_log),
        ("Function", default_log),
        ("Node", default_log),
        ("Parsing", default_log),
        ("Detectors", default_log),
        ("FunctionSolc", default_log),
        ("ExpressionParsing", default_log),
        ("TypeParsing", default_log),
        ("SSA_Conversion", default_log),
        ("Printers", default_log),
        # ('CryticCompile', default_log)
    ]:
        logger_level = logging.getLogger(l_name)
        logger_level.setLevel(l_level)

    # CryticCompile gets its own handler/formatter and does not propagate,
    # so its diagnostics are colorized independently.
    console_handler = logging.StreamHandler()
    console_handler.setLevel(logging.INFO)
    console_handler.setFormatter(FormatterCryticCompile())

    crytic_compile_error = logging.getLogger(("CryticCompile"))
    crytic_compile_error.addHandler(console_handler)
    crytic_compile_error.propagate = False
    crytic_compile_error.setLevel(logging.INFO)

    results_detectors = []
    results_printers = []
    try:
        filename = args.filename

        # Determine if we are handling ast from solc
        if args.solc_ast or (filename.endswith(".json")
                             and not is_supported(filename)):
            globbed_filenames = glob.glob(filename, recursive=True)
            # Prefer *.json files inside a directory target; otherwise fall
            # back to whatever the glob matched.
            filenames = glob.glob(os.path.join(filename, "*.json"))
            if not filenames:
                filenames = globbed_filenames
            number_contracts = 0

            fortress_instances = []
            if args.splitted:
                # All ASTs belong to one splitted compilation unit.
                (
                    fortress_instance,
                    results_detectors,
                    results_printers,
                    number_contracts,
                ) = process_from_asts(filenames, args, detector_classes,
                                      printer_classes)
                fortress_instances.append(fortress_instance)
            else:
                # One independent analysis per AST file; accumulate results.
                for filename in filenames:
                    (
                        fortress_instance,
                        results_detectors_tmp,
                        results_printers_tmp,
                        number_contracts_tmp,
                    ) = process_single(filename, args, detector_classes,
                                       printer_classes)
                    number_contracts += number_contracts_tmp
                    results_detectors += results_detectors_tmp
                    results_printers += results_printers_tmp
                    fortress_instances.append(fortress_instance)

        # Rely on CryticCompile to discern the underlying type of compilations.
        else:
            (
                fortress_instances,
                results_detectors,
                results_printers,
                number_contracts,
            ) = process_all(filename, args, detector_classes, printer_classes)

        # Determine if we are outputting JSON
        if outputting_json or outputting_zip:
            # Add our compilation information to JSON
            if "compilations" in args.json_types:
                compilation_results = []
                for fortress_instance in fortress_instances:
                    compilation_results.append(
                        generate_standard_export(
                            fortress_instance.crytic_compile))
                json_results["compilations"] = compilation_results

            # Add our detector results to JSON if desired.
            if results_detectors and "detectors" in args.json_types:
                json_results["detectors"] = results_detectors

            # Add our printer results to JSON if desired.
            if results_printers and "printers" in args.json_types:
                json_results["printers"] = results_printers

            # Add our detector types to JSON
            if "list-detectors" in args.json_types:
                detectors, _ = get_detectors_and_printers()
                json_results["list-detectors"] = output_detectors_json(
                    detectors)

            # Add our detector types to JSON
            if "list-printers" in args.json_types:
                _, printers = get_detectors_and_printers()
                json_results["list-printers"] = output_printers_json(printers)

        # Output our results to markdown if we wish to compile a checklist.
        if args.checklist:
            output_results_to_markdown(results_detectors)

        # Dont print the number of result for printers
        if number_contracts == 0:
            logger.warning(red("No contract was analyzed"))
        if printer_classes:
            logger.info("%s analyzed (%d contracts)", filename,
                        number_contracts)
        else:
            logger.info(
                "%s analyzed (%d contracts with %d detectors), %d result(s) found",
                filename,
                number_contracts,
                len(detector_classes),
                len(results_detectors),
            )

        if args.ignore_return_value:
            return

    except FortressException as fortress_exception:
        output_error = str(fortress_exception)
        traceback.print_exc()
        logging.error(red("Error:"))
        logging.error(red(output_error))
        logging.error(
            "Please report an issue to https://github.com/crytic/fortress/issues"
        )

    except Exception:  # pylint: disable=broad-except
        # Top-level boundary: record the traceback so it can be surfaced in
        # the JSON/zip output and reflected in the exit code.
        output_error = traceback.format_exc()
        logging.error(traceback.print_exc())
        logging.error(f"Error in {args.filename}"
                      )  # pylint: disable=logging-fstring-interpolation
        logging.error(output_error)

    # If we are outputting JSON, capture the redirected output and disable the redirect to output the final JSON.
    if outputting_json:
        if "console" in args.json_types:
            json_results["console"] = {
                "stdout": StandardOutputCapture.get_stdout_output(),
                "stderr": StandardOutputCapture.get_stderr_output(),
            }
        StandardOutputCapture.disable()
        output_to_json(None if outputting_json_stdout else args.json,
                       output_error, json_results)

    if outputting_zip:
        output_to_zip(args.zip, output_error, json_results, args.zip_type)

    if args.perf:
        cp.disable()
        stats = pstats.Stats(cp).sort_stats("cumtime")
        stats.print_stats()

    # Exit with the appropriate status code
    if output_error:
        sys.exit(-1)
    else:
        my_exit(results_detectors)
def generate_erc20(
    contract: Contract, type_property: str, addresses: Addresses
):  # pylint: disable=too-many-locals
    """
    Generate the ERC20 tests
    Files generated:
    - interfaces.sol: generic crytic interface
    - Properties[CONTRACTNAME].sol: erc20 properties
    - Test[CONTRACTNAME].sol: Target, its constructor needs to be manually updated
    - If truffle
        - migrations/x_Test[CONTRACTNAME].js
        - test/crytic/InitializationTest[CONTRACTNAME].js: unit tests to check that the contract is correctly configured
        - test/crytic/Test[CONTRACTNAME].js: ERC20 checks
    - echidna_config.yaml: configuration file
    :param addresses:
    :param contract:
    :param type_property: One of ERC20_PROPERTIES.keys()
    :return:
    """
    # Precondition: a crytic-compile compilation on a supported platform.
    if contract.fortress.crytic_compile is None:
        logging.error("Please compile with crytic-compile")
        return
    if contract.fortress.crytic_compile.type not in [
        PlatformType.TRUFFLE,
        PlatformType.SOLC,
    ]:
        logging.error(
            f"{contract.fortress.crytic_compile.type} not yet supported by fortress-prop"
        )
        return

    # Check if the contract is an ERC20 contract and if the functions have the correct visibility
    errors = _check_compatibility(contract)
    if errors:
        logger.error(red(errors))
        return

    # Resolve the requested property set; None means an unknown key.
    erc_properties = ERC20_PROPERTIES.get(type_property, None)
    if erc_properties is None:
        logger.error(
            f"{type_property} unknown. Types available {ERC20_PROPERTIES.keys()}"
        )
        return
    properties = erc_properties.properties

    # Generate the output directory
    output_dir = _platform_to_output_dir(contract.fortress.crytic_compile.platform)
    output_dir.mkdir(exist_ok=True)

    # Get the properties
    solidity_properties, unit_tests = _get_properties(contract, properties)

    # Generate the contract containing the properties
    generate_solidity_interface(output_dir, addresses)
    property_file = generate_solidity_properties(
        contract, type_property, solidity_properties, output_dir
    )

    # Generate the Test contract
    initialization_recommendation = _initialization_recommendation(type_property)
    contract_filename, contract_name = generate_test_contract(
        contract,
        type_property,
        output_dir,
        property_file,
        initialization_recommendation,
    )

    # Generate Echidna config file (placed next to the compilation target).
    echidna_config_filename = generate_echidna_config(
        Path(contract.fortress.crytic_compile.target).parent, addresses
    )

    unit_test_info = ""

    # If truffle, generate unit tests
    if contract.fortress.crytic_compile.type == PlatformType.TRUFFLE:
        unit_test_info = generate_truffle_test(contract, type_property, unit_tests, addresses)

    # Final instructions for the user: manual constructor update + how to run.
    logger.info("################################################")
    logger.info(green(f"Update the constructor in {Path(output_dir, contract_filename)}"))

    if unit_test_info:
        logger.info(green(unit_test_info))

    logger.info(green("To run Echidna:"))
    txt = f"\t echidna-test {contract.fortress.crytic_compile.target} "
    txt += f"--contract {contract_name} --config {echidna_config_filename}"
    logger.info(green(txt))