def write_file(
    output_dir: Path,
    filename: str,
    content: str,
    allow_overwrite: bool = True,
    discard_if_exist: bool = False,
):
    """
    Write the content into output_dir/filename

    :param output_dir: Directory that will contain the file
    :param filename: Name of the file to write
    :param content: Text content to write
    :param allow_overwrite: If true, allows to overwrite existing file
        (default: true). Emit warning if overwrites
    :param discard_if_exist: If true, it will not emit warning or overwrite
        the file if it exists, (default: False)
    :return: None
    """
    file_to_write = Path(output_dir, filename)
    if file_to_write.exists():
        if discard_if_exist:
            # Keep the existing file silently.
            return
        if not allow_overwrite:
            logger.info(
                yellow(
                    f"{file_to_write} already exist and will not be overwritten"
                ))
            return
        logger.info(yellow(f"Overwrite {file_to_write}"))
    else:
        logger.info(green(f"Write {file_to_write}"))
    # Explicit encoding: the platform default is not portable and the other
    # writers in this module use utf8.
    with open(file_to_write, "w", encoding="utf8") as f:
        f.write(content)
def output_to_json(filename: str, error, results: Dict):
    """
    Write an encapsulated JSON report to a file or stdout.

    :param filename: Filename where the json will be written. If None or "-",
        write to stdout
    :param error: Error to report (None on success)
    :param results: Results to report
    :return: None
    """
    # Create our encapsulated JSON result.
    json_result = {"success": error is None, "error": error, "results": results}

    # "-" is the conventional CLI alias for stdout.
    if filename == "-":
        filename = None

    # Determine if we should output to stdout
    if filename is None:
        # Write json to console
        print(json.dumps(json_result))
    else:
        # Write json to file
        if os.path.isfile(filename):
            # Bug fix: the f-string previously printed a literal placeholder
            # instead of the target filename.
            logger.info(yellow(f"{filename} exists already, the overwrite is prevented"))
        else:
            with open(filename, "w", encoding="utf8") as f:
                json.dump(json_result, f, indent=2)
def get_summary_erc20(self, contract):
    """Return a colored, newline-separated summary of the ERC20 properties
    of *contract* (pausability, minting capability, approve race-condition
    mitigation)."""
    summary = ""
    pause, mint_unlimited, race_condition_mitigated = self._get_summary_erc20(
        contract)
    if pause:
        summary += yellow("Pausable") + "\n"
    # mint_unlimited is a tri-state: None (no minting), True (unlimited),
    # False (limited minting).
    if mint_unlimited is None:
        summary += green("No Minting") + "\n"
    elif mint_unlimited:
        summary += red("∞ Minting") + "\n"
    else:
        summary += yellow("Minting") + "\n"
    if not race_condition_mitigated:
        summary += red("Approve Race Cond.") + "\n"
    return summary
def fortress_format(fortress, **kwargs):  # pylint: disable=too-many-locals
    """Run the selected detectors with patch generation enabled and export
    each issue's patches as .patch files under crytic-export/patches/<n>/.

    Keyword Args:
        detectors_to_run (str): Comma-separated list of detectors, defaults to all
        detectors_to_exclude (str): Comma-separated list of detectors to skip
    """
    detectors_to_run = choose_detectors(
        kwargs.get("detectors_to_run", "all"), kwargs.get("detectors_to_exclude", "")
    )
    for detector in detectors_to_run:
        fortress.register_detector(detector)
    fortress.generate_patches = True

    detector_results = fortress.run_detectors()
    detector_results = [x for x in detector_results if x]  # remove empty results
    detector_results = [item for sublist in detector_results for item in sublist]  # flatten

    export = Path("crytic-export", "patches")
    export.mkdir(parents=True, exist_ok=True)

    counter_result = 0
    logger.info(yellow("fortress-format is in beta, carefully review each patch before merging it."))
    for result in detector_results:
        if "patches" not in result:
            continue
        # One directory per issue, numbered in discovery order.
        one_line_description = result["description"].split("\n")[0]
        export_result = Path(export, f"{counter_result}")
        export_result.mkdir(parents=True, exist_ok=True)
        counter_result += 1
        counter = 0
        logger.info(f"Issue: {one_line_description}")
        logger.info(f"Generated: ({export_result})")
        for diff in result["patches_diff"].values():
            filename = f"fix_{counter}.patch"
            path = Path(export_result, filename)
            # Bug fix: log the actual patch path instead of a literal
            # "(unknown)" placeholder.
            logger.info(f"\t- {path}")
            with open(path, "w", encoding="utf8") as f:
                f.write(diff)
            counter += 1
def read_config_file(args):
    """Merge values from the JSON file at args.config_file into *args*.

    A config value is applied only when the matching attribute still holds
    its default, so explicit command-line values win. Unknown keys are
    reported and skipped; a malformed file is logged as an error.
    """
    if not os.path.isfile(args.config_file):
        return
    try:
        with open(args.config_file) as config_fd:
            config = json.load(config_fd)
    except json.decoder.JSONDecodeError as exc:
        logger.error(
            red("Impossible to read {}, please check the file {}".format(
                args.config_file, exc)))
        return
    for key, value in config.items():
        if key not in defaults_flag_in_config:
            logger.info(
                yellow("{} has an unknown key: {} : {}".format(
                    args.config_file, key, value)))
            continue
        if getattr(args, key) == defaults_flag_in_config[key]:
            setattr(args, key, value)
def output_to_zip(filename: str, error: Optional[str], results: Dict, zip_type: str = "lzma"):
    """
    Output the results to a zip
    The file in the zip is named fortress_results.json
    Note: the json file will not have indentation, as a result the resulting json file will be smaller

    :param zip_type: Compression type key looked up in ZIP_TYPES_ACCEPTED
        (falls back to LZMA for unknown keys)
    :param filename: Path of the zip archive to create
    :param error: Error to report (None on success)
    :param results: Results to report
    :return: None
    """
    json_result = {"success": error is None, "error": error, "results": results}
    if os.path.isfile(filename):
        # Bug fix: the f-string previously printed a literal "(unknown)"
        # placeholder instead of the target filename.
        logger.info(yellow(f"{filename} exists already, the overwrite is prevented"))
    else:
        with ZipFile(
                filename,
                "w",
                compression=ZIP_TYPES_ACCEPTED.get(zip_type, zipfile.ZIP_LZMA),
        ) as file_desc:
            file_desc.writestr("fortress_results.json", json.dumps(json_result).encode("utf8"))
def get_detectors_result(
        self) -> Tuple[str, List[Dict], int, int, int, int, int]:
    """Build a colored textual summary of detector findings per severity.

    :return: (summary text, all results, optimization, informational,
        low, medium, high counts)
    """
    (
        all_results,
        optimization,
        informational,
        low,
        medium,
        high,
    ) = self._get_detectors_result()
    parts = [
        f"Number of optimization issues: {green(optimization)}\n",
        f"Number of informational issues: {green(informational)}\n",
        f"Number of low issues: {green(low)}\n",
    ]
    medium_txt = yellow(medium) if medium > 0 else green(medium)
    parts.append(f"Number of medium issues: {medium_txt}\n")
    # The high line closes the summary; a clean run ends with a blank line.
    if high > 0:
        parts.append(f"Number of high issues: {red(high)}\n")
    else:
        parts.append(f"Number of high issues: {green(high)}\n\n")
    txt = "".join(parts)
    return txt, all_results, optimization, informational, low, medium, high
def detect(self):
    """Run this detector, log findings, optionally generate patches, and
    (in triage mode) let the user interactively hide results.

    :return: the list of valid, non-hidden result dictionaries
    """
    all_results = self._detect()
    # Keep only dictionaries
    all_results = [r.data for r in all_results]
    results = []
    # only keep valid result, and remove duplicate
    # pylint: disable=expression-not-assigned
    [
        results.append(r)
        for r in all_results
        if self.fortress.valid_result(r) and r not in results
    ]
    if results:
        if self.logger:
            # Build a single log entry: one description per result, prefixed
            # with its index when triage mode is on (indexes are what the
            # user types below to hide results).
            info = "\n"
            for idx, result in enumerate(results):
                if self.fortress.triage_mode:
                    info += "{}: ".format(idx)
                info += result["description"]
            info += "Reference: {}".format(self.WIKI)
            self._log(info)
        if self.fortress.generate_patches:
            for result in results:
                try:
                    self._format(self.fortress, result)
                    if not "patches" in result:
                        continue
                    result["patches_diff"] = dict()
                    # One unified diff per patched source file.
                    for file in result["patches"]:
                        original_txt = self.fortress.source_code[file].encode(
                            "utf8")
                        patched_txt = original_txt
                        offset = 0
                        patches = result["patches"][file]
                        patches.sort(key=lambda x: x["start"])
                        # Reject overlapping patches: after sorting by start,
                        # each patch's end must not pass the next one's end.
                        # NOTE(review): this compares end vs end, not end vs
                        # next start — confirm it catches all collisions.
                        if not all(patches[i]["end"] <= patches[i + 1]["end"]
                                   for i in range(len(patches) - 1)):
                            self._log(
                                f"Impossible to generate patch; patches collisions: {patches}"
                            )
                            continue
                        # Apply patches sequentially; `offset` tracks the
                        # cumulative size change so later positions stay valid.
                        for patch in patches:
                            patched_txt, offset = apply_patch(
                                patched_txt, patch, offset)
                        diff = create_diff(self.fortress, original_txt,
                                           patched_txt, file)
                        if not diff:
                            self._log(
                                f"Impossible to generate patch; empty {result}"
                            )
                        else:
                            result["patches_diff"][file] = diff
                except FormatImpossible as exception:
                    self._log(
                        f'\nImpossible to patch:\n\t{result["description"]}\t{exception}'
                    )
    if results and self.fortress.triage_mode:
        # Interactive triage: loop until the user gives a valid answer.
        while True:
            indexes = input(
                'Results to hide during next runs: "0,1,...,{}" or "All" (enter to not hide results): '
                .format(len(results)))
            if indexes == "All":
                self.fortress.save_results_to_hide(results)
                return []
            if indexes == "":
                return results
            # Tolerate bracketed input such as "[0,1,2]".
            if indexes.startswith("["):
                indexes = indexes[1:]
            if indexes.endswith("]"):
                indexes = indexes[:-1]
            try:
                indexes = [int(i) for i in indexes.split(",")]
                # Persist the hidden results and return only the rest.
                self.fortress.save_results_to_hide([
                    r for (idx, r) in enumerate(results) if idx in indexes
                ])
                return [
                    r for (idx, r) in enumerate(results)
                    if idx not in indexes
                ]
            except ValueError:
                self.logger.error(
                    yellow(
                        "Malformed input. Example of valid input: 0,1,2,3")
                )
    return results