Example #1
def _get_vyper_ast(filename: str,
                   vyper: str,
                   env=None,
                   working_dir=None) -> Dict:
    if not os.path.isfile(filename):
        raise InvalidCompilation(
            "{} does not exist (are you in the correct directory?)".format(
                filename))

    cmd = [vyper, filename, "-f", "ast"]

    additional_kwargs = {"cwd": working_dir} if working_dir else {}
    try:
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   env=env,
                                   **additional_kwargs)
    except Exception as exception:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(exception)

    stdout, stderr = process.communicate()

    try:
        res = stdout.split(b"\n")
        res = res[-2]
        return json.loads(res)

    except json.decoder.JSONDecodeError:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(f"Invalid vyper compilation\n{stderr}")
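
A minimal usage sketch, assuming the helper above is importable together with os, json, subprocess, and InvalidCompilation, and that a vyper binary is on PATH; the contract path is hypothetical:

# Hypothetical call: parse the AST of a Vyper contract with the system vyper binary.
try:
    vyper_ast = _get_vyper_ast("contracts/token.vy", "vyper")
    print(sorted(vyper_ast))  # inspect the top-level keys of the AST JSON
except InvalidCompilation as error:
    print(f"vyper failed: {error}")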
Example #2
def _run_vyper(filename: str, vyper: str, env: Dict = None, working_dir: str = None) -> Dict:
    if not os.path.isfile(filename):
        raise InvalidCompilation(
            "{} does not exist (are you in the correct directory?)".format(filename)
        )

    cmd = [vyper, filename, "-f", "combined_json"]

    additional_kwargs: Dict = {"cwd": working_dir} if working_dir else {}
    try:
        process = subprocess.Popen(
            cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, **additional_kwargs
        )
    except OSError as error:
        raise InvalidCompilation(error)

    stdout, stderr = process.communicate()

    try:
        res = stdout.split(b"\n")
        res = res[-2]
        return json.loads(res)

    except json.decoder.JSONDecodeError:
        raise InvalidCompilation(f"Invalid vyper compilation\n{stderr}")
Example #3
def _get_version(truffle_call: List[str], cwd: str) -> Tuple[str, str]:
    cmd = truffle_call + ["version"]
    try:
        with subprocess.Popen(cmd,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              cwd=cwd) as process:
            sstdout, _ = process.communicate()
            ssstdout = sstdout.decode()  # convert bytestrings to unicode strings
            if not ssstdout:
                raise InvalidCompilation(
                    "Truffle failed to run: 'truffle version'")
            stdout = ssstdout.split("\n")
            for line in stdout:
                if "Solidity" in line:
                    if "native" in line:
                        return solc.get_version("solc", dict()), "solc-native"
                    version = re.findall(r"\d+\.\d+\.\d+", line)[0]
                    compiler = re.findall(r"(solc[a-z\-]*)", line)
                    if len(compiler) > 0:
                        return version, compiler[0]

            raise InvalidCompilation(f"Solidity version not found {stdout}")
    except OSError as error:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(f"Truffle failed: {error}")
Example #4
def _run_solc_standard_json(solc_input: Dict,
                            solc: str,
                            solc_disable_warnings=False,
                            working_dir=None):
    """
    Note: Ensure that crytic_compile.compiler_version is set prior to calling _run_solc

    :param solc_input:
    :param solc:
    :param solc_disable_warnings:
    :param working_dir:
    :return:
    """
    cmd = [solc, "--standard-json", "--allow-paths", "."]
    additional_kwargs = {"cwd": working_dir} if working_dir else {}

    try:
        process = subprocess.Popen(
            cmd,
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            **additional_kwargs,
        )
    except OSError as error:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(error)
    stdout_b, stderr_b = process.communicate(
        json.dumps(solc_input).encode("utf-8"))
    stdout, stderr = (
        stdout_b.decode(),
        stderr_b.decode(),
    )  # convert bytestrings to unicode strings

    try:
        solc_json_output = json.loads(stdout)

        # Check for errors and raise them if any exist.
        solc_errors = solc_json_output.get("errors", [])
        if solc_errors:
            solc_error_occurred = False
            solc_exception_str = ""
            for solc_error in solc_errors:
                if solc_error["severity"] != "warning":
                    solc_error_occurred = True
                elif solc_disable_warnings:
                    continue
                solc_exception_str += (
                    f"{solc_error.get('type', 'UnknownExceptionType')}: "
                    f"{solc_error.get('formattedMessage', 'N/A')}\n")

            if solc_error_occurred:
                raise InvalidCompilation(solc_exception_str)
            if solc_exception_str:
                LOGGER.warning(solc_exception_str)

        return solc_json_output
    except json.decoder.JSONDecodeError:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(f"Invalid solc compilation {stderr}")
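
For context, a minimal solc standard-JSON input of the shape this helper reads from stdin; the source name, pragma, and output selection are illustrative (see docs.soliditylang.org for the full schema):

# Illustrative input for _run_solc_standard_json.
solc_input = {
    "language": "Solidity",
    "sources": {
        "Token.sol": {"content": "pragma solidity ^0.8.0; contract Token {}"},
    },
    "settings": {
        "optimizer": {"enabled": False},
        "outputSelection": {"*": {"*": ["abi", "evm.bytecode", "evm.deployedBytecode"]}},
    },
}
# output = _run_solc_standard_json(solc_input, "solc", solc_disable_warnings=True)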
Example #5
def get_version(solc: str, env: Optional[Dict[str, str]]) -> str:
    """Obtains the version of the solc executable specified.

    Args:
        solc (str): The solc executable name to invoke.
        env (Optional[Dict[str, str]]): An optional environment key-value store which can be used when invoking the solc executable.

    Raises:
        InvalidCompilation: If solc failed to run

    Returns:
        str: Returns the version of the provided solc executable.
    """

    cmd = [solc, "--version"]
    try:
        with subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                env=env,
                executable=shutil.which(cmd[0]),
        ) as process:
            stdout_bytes, _ = process.communicate()
            stdout = stdout_bytes.decode()  # convert bytestrings to unicode strings
            version = re.findall(r"\d+\.\d+\.\d+", stdout)
            if len(version) == 0:
                raise InvalidCompilation(
                    f"Solidity version not found: {stdout}")
            return version[0]
    except OSError as error:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(error)
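
A minimal usage sketch, assuming a solc binary is installed and resolvable by shutil.which; the printed value is illustrative:

import os

# Hypothetical call: report the version of whatever "solc" resolves to on PATH.
print(get_version("solc", dict(os.environ)))  # e.g. "0.8.19"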
Example #6
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Run the compilation

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile object
            **kwargs: optional arguments. Used "brownie_ignore_compile", "ignore_compile"

        Raises:
            InvalidCompilation: If brownie failed to run
        """
        build_directory = Path("build", "contracts")
        brownie_ignore_compile = kwargs.get("brownie_ignore_compile",
                                            False) or kwargs.get(
                                                "ignore_compile", False)

        base_cmd = ["brownie"]

        if not brownie_ignore_compile:
            cmd = base_cmd + ["compile"]
            try:
                with subprocess.Popen(
                        cmd,
                        stdout=subprocess.PIPE,
                        stderr=subprocess.PIPE,
                        cwd=self._target,
                        executable=shutil.which(cmd[0]),
                ) as process:
                    stdout_bytes, stderr_bytes = process.communicate()
                    stdout, stderr = (
                        stdout_bytes.decode(),
                        stderr_bytes.decode(),
                    )  # convert bytestrings to unicode strings

                    LOGGER.info(stdout)
                    if stderr:
                        LOGGER.error(stderr)

            except OSError as error:
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(error)

        if not os.path.isdir(os.path.join(self._target, build_directory)):
            raise InvalidCompilation(
                "`brownie compile` failed. Can you run it?")

        filenames = list(Path(self._target, build_directory).rglob("*.json"))

        _iterate_over_files(crytic_compile, Path(self._target), filenames)
Example #7
def _get_version_from_config(builder_directory: Path) -> Tuple[str, str, bool]:
    """Parse the compiler version

    Args:
        builder_directory (Path): path to the project's directory

    Raises:
        InvalidCompilation: If the configuration file was not found

    Returns:
        Tuple[str, str, bool]: (compiler_name,compiler_version,is_optimized)
    """

    #    :return: (version, optimized)

    path_config = Path(builder_directory, "last-solc-config.json")
    if not path_config.exists():
        path_config = Path(builder_directory, "last-vyper-config.json")
        if not path_config.exists():
            raise InvalidCompilation(f"{path_config} not found")
        with open(path_config, "r", encoding="utf8") as config_f:
            version = config_f.read()
            return "vyper", version, False
    with open(path_config, "r", encoding="utf8") as config_f:
        config = json.load(config_f)

    version = config["solc"]["version"]

    optimized = "optimizer" in config["solc"] and config["solc"]["optimizer"]
    return "solc", version, optimized
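
Inferred purely from the lookups above, a last-solc-config.json accepted by this parser would contain at least the following; values are illustrative and the real file may carry more fields:

# Illustrative content of last-solc-config.json, limited to the keys read above.
example_config = {
    "solc": {
        "version": "0.8.19",
        "optimizer": True,
    }
}
# For this content the function returns ("solc", "0.8.19", True).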
Example #8
def _run_solcs_env(
    compilation_unit: "CompilationUnit",
    filename,
    solc,
    solc_disable_warnings,
    solc_arguments,
    solc_remaps=None,
    env=None,
    working_dir=None,
    solcs_env=None,
    force_legacy_json=False,
):
    env = dict(os.environ) if env is None else env
    targets_json = None
    guessed_solcs = _guess_solc(filename, working_dir)
    for guessed_solc in guessed_solcs:
        if guessed_solc not in solcs_env:
            continue
        try:
            env["SOLC_VERSION"] = guessed_solc
            targets_json = _run_solc(
                compilation_unit,
                filename,
                solc,
                solc_disable_warnings,
                solc_arguments,
                solc_remaps=solc_remaps,
                env=env,
                working_dir=working_dir,
                force_legacy_json=force_legacy_json,
            )
        except InvalidCompilation:
            pass

    if not targets_json:
        solc_versions_env = solcs_env

        for version_env in solc_versions_env:
            try:
                env["SOLC_VERSION"] = version_env
                targets_json = _run_solc(
                    compilation_unit,
                    filename,
                    solc,
                    solc_disable_warnings,
                    solc_arguments,
                    solc_remaps=solc_remaps,
                    env=env,
                    working_dir=working_dir,
                    force_legacy_json=force_legacy_json,
                )
            except InvalidCompilation:
                pass

    if not targets_json:
        raise InvalidCompilation(
            "Invalid solc compilation, none of the solc versions provided worked"
        )

    return targets_json
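
A hedged sketch of a call site, assuming an existing CompilationUnit and a solc wrapper that honours the SOLC_VERSION environment variable (for example solc-select); the file name and version list are illustrative:

# Hypothetical call: try the version guessed from the pragma first, then the listed fallbacks.
targets_json = _run_solcs_env(
    compilation_unit,          # an existing CompilationUnit (see the other examples)
    "contracts/Token.sol",
    "solc",
    solc_disable_warnings=False,
    solc_arguments="",
    solcs_env=["0.8.19", "0.7.6"],
)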
Example #9
def _run_solcs_path(
    compilation_unit: "CompilationUnit",
    filename,
    solcs_path,
    solc_disable_warnings,
    solc_arguments,
    solc_remaps=None,
    env=None,
    working_dir=None,
    force_legacy_json=False,
):
    targets_json = None
    if isinstance(solcs_path, dict):
        guessed_solcs = _guess_solc(filename, working_dir)
        for guessed_solc in guessed_solcs:
            if guessed_solc not in solcs_path:
                continue
            try:
                targets_json = _run_solc(
                    compilation_unit,
                    filename,
                    solcs_path[guessed_solc],
                    solc_disable_warnings,
                    solc_arguments,
                    solc_remaps=solc_remaps,
                    env=env,
                    working_dir=working_dir,
                    force_legacy_json=force_legacy_json,
                )
            except InvalidCompilation:
                pass

    if not targets_json:
        solc_bins = solcs_path.values() if isinstance(solcs_path,
                                                      dict) else solcs_path

        for solc_bin in solc_bins:
            try:
                targets_json = _run_solc(
                    compilation_unit,
                    filename,
                    solc_bin,
                    solc_disable_warnings,
                    solc_arguments,
                    solc_remaps=solc_remaps,
                    env=env,
                    working_dir=working_dir,
                    force_legacy_json=force_legacy_json,
                )
            except InvalidCompilation:
                pass

    if not targets_json:
        raise InvalidCompilation(
            "Invalid solc compilation, none of the solc versions provided worked"
        )

    return targets_json
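
Similarly, a hedged sketch for the path-based variant, assuming locally installed solc binaries; the paths and versions are illustrative:

# Hypothetical call: map candidate versions to local solc binaries and let each be tried in turn.
solcs_path = {
    "0.8.19": "/usr/local/bin/solc-0.8.19",
    "0.7.6": "/usr/local/bin/solc-0.7.6",
}
targets_json = _run_solcs_path(
    compilation_unit,          # an existing CompilationUnit (see the other examples)
    "contracts/Token.sol",
    solcs_path,
    solc_disable_warnings=False,
    solc_arguments="",
)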
Example #10
def _get_version(compiler: str, cwd: str, config=None) -> str:
    version = ""
    if config is not None and "solcVersion" in config:
        version = re.findall(r"\d+\.\d+\.\d+", config["solcVersion"])[0]

    elif compiler == "dockerized-solc":
        version = config["docker-tag"]

    elif compiler == "native":
        cmd = ["solc", "--version"]
        try:
            process = subprocess.Popen(cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       cwd=cwd)
        except OSError as error:
            # pylint: disable=raise-missing-from
            raise InvalidCompilation(error)
        stdout_bytes, _ = process.communicate()
        stdout_txt = stdout_bytes.decode()  # convert bytestrings to unicode strings
        stdout = stdout_txt.split("\n")
        for line in stdout:
            if "Version" in line:
                version = re.findall(r"\d+\.\d+\.\d+", line)[0]

    elif compiler in ["solc-js"]:
        cmd = ["solcjs", "--version"]
        try:
            process = subprocess.Popen(cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       cwd=cwd)
        except OSError as error:
            # pylint: disable=raise-missing-from
            raise InvalidCompilation(error)
        stdout_bytes, _ = process.communicate()
        stdout_txt = stdout_bytes.decode()  # convert bytestrings to unicode strings
        version = re.findall(r"\d+\.\d+\.\d+", stdout_txt)[0]

    else:
        raise InvalidCompilation(f"Solidity version not found {compiler}")

    return version
Example #11
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param kwargs:
        :return:
        """
        build_directory = Path("build", "contracts")
        brownie_ignore_compile = kwargs.get("brownie_ignore_compile",
                                            False) or kwargs.get(
                                                "ignore_compile", False)

        base_cmd = ["brownie"]

        if not brownie_ignore_compile:
            cmd = base_cmd + ["compile"]
            try:
                process = subprocess.Popen(cmd,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE,
                                           cwd=self._target)
            except OSError as error:
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(error)

            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings

            LOGGER.info(stdout)
            if stderr:
                LOGGER.error(stderr)

        if not os.path.isdir(os.path.join(self._target, build_directory)):
            raise InvalidCompilation(
                "`brownie compile` failed. Can you run it?")

        filenames = glob.glob(
            os.path.join(self._target, build_directory, "*.json"))

        _iterate_over_files(crytic_compile, self._target, filenames)
Example #12
def get_version(solc: str) -> str:
    """
    Get the compiler version used

    :param solc:
    :return:
    """
    cmd = [solc, "--version"]
    try:
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except OSError as error:
        raise InvalidCompilation(error)
    stdout_bytes, _ = process.communicate()
    stdout = stdout_bytes.decode()  # convert bytestrings to unicode strings
    version = re.findall(r"\d+\.\d+\.\d+", stdout)
    if len(version) == 0:
        raise InvalidCompilation(f"Solidity version not found: {stdout}")
    return version[0]
Example #13
def _get_vyper_ast(
    filename: str, vyper: str, env: Optional[Dict] = None, working_dir: Optional[str] = None
) -> Dict:
    """Get ast from vyper

    Args:
        filename (str): vyper file
        vyper (str): vyper binary
        env (Dict, optional): Environment variables. Defaults to None.
        working_dir (str, optional): Working directory. Defaults to None.

    Raises:
        InvalidCompilation: If vyper failed to run

    Returns:
        Dict: The AST of the contract as returned by vyper
    """
    if not os.path.isfile(filename):
        raise InvalidCompilation(f"{filename} does not exist (are you in the correct directory?)")

    cmd = [vyper, filename, "-f", "ast"]

    additional_kwargs: Dict = {"cwd": working_dir} if working_dir else {}
    stderr = ""
    try:
        with subprocess.Popen(
            cmd,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            env=env,
            executable=shutil.which(cmd[0]),
            **additional_kwargs,
        ) as process:
            stdout, stderr = process.communicate()
            res = stdout.split(b"\n")
            res = res[-2]
            return json.loads(res)
    except json.decoder.JSONDecodeError:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(f"Invalid vyper compilation\n{stderr}")
    except Exception as exception:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(exception)
Example #14
def export_to_truffle(crytic_compile: "CryticCompile",
                      **kwargs: str) -> List[str]:
    """Export to the truffle format

    Args:
        crytic_compile (CryticCompile): CryticCompile object to export
        **kwargs: optional arguments. Used: "export_dir"

    Raises:
        InvalidCompilation: If there is more than one compilation unit

    Returns:
        List[str]: Singleton with the generated directory
    """
    # Get our export directory, if it's set, we create the path.
    export_dir = kwargs.get("export_dir", "crytic-export")
    if export_dir and not os.path.exists(export_dir):
        os.makedirs(export_dir)

    compilation_units = list(crytic_compile.compilation_units.values())
    if len(compilation_units) != 1:
        raise InvalidCompilation("Truffle export requires 1 compilation unit")
    compilation_unit = compilation_units[0]

    # Loop for each contract filename.
    results: List[Dict] = []
    for filename, contract_names in compilation_unit.filename_to_contracts.items():
        for contract_name in contract_names:
            # Create the informational object to output for this contract
            output = {
                "contractName": contract_name,
                "abi": compilation_unit.abi(contract_name),
                "bytecode": "0x" + compilation_unit.bytecode_init(contract_name),
                "deployedBytecode": "0x" + compilation_unit.bytecode_runtime(contract_name),
                "ast": compilation_unit.ast(filename.absolute),
                "userdoc": compilation_unit.natspec[contract_name].userdoc.export(),
                "devdoc": compilation_unit.natspec[contract_name].devdoc.export(),
            }
            results.append(output)

            # If we have an export directory, export it.

            path = os.path.join(export_dir, contract_name + ".json")
            with open(path, "w", encoding="utf8") as file_desc:
                json.dump(output, file_desc)

    return [export_dir]
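
A hypothetical usage sketch, assuming crytic-compile is installed and the target compiles into a single compilation unit; the target path is illustrative:

from crytic_compile import CryticCompile

# Compile a target, then write one Truffle-style artifact per contract into crytic-export/.
cc = CryticCompile("contracts/Token.sol")
(export_dir,) = export_to_truffle(cc, export_dir="crytic-export")
print(export_dir)  # directory now contains <ContractName>.json files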
Example #15
def _get_version(truffle_call: List[str], cwd: str) -> Tuple[str, str]:
    """Get the compiler version

    Args:
        truffle_call (List[str]): Command to run truffle
        cwd (str): Working directory to run truffle

    Raises:
        InvalidCompilation: If truffle failed, or the solidity version was not found

    Returns:
        Tuple[str, str]: (compiler version, compiler name)
    """
    cmd = truffle_call + ["version"]
    try:
        with subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=cwd,
                executable=shutil.which(cmd[0]),
        ) as process:
            sstdout, _ = process.communicate()
            ssstdout = sstdout.decode()  # convert bytestrings to unicode strings
            if not ssstdout:
                raise InvalidCompilation(
                    "Truffle failed to run: 'truffle version'")
            stdout = ssstdout.split("\n")
            for line in stdout:
                if "Solidity" in line:
                    if "native" in line:
                        return solc.get_version("solc", {}), "solc-native"
                    version = re.findall(r"\d+\.\d+\.\d+", line)[0]
                    compiler = re.findall(r"(solc[a-z\-]*)", line)
                    if len(compiler) > 0:
                        return version, compiler[0]

            raise InvalidCompilation(f"Solidity version not found {stdout}")
    except OSError as error:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(f"Truffle failed: {error}")
Example #16
def _load_config(config_file: str) -> Dict:
    """
    Load the config file
    :param config_file:
    :return:
    """
    with open(config_file, "r") as file_desc:
        content = file_desc.read()

    if "module.exports" in content:
        raise InvalidCompilation("module.exports is not supported for waffle")
    return json.loads(content)
Example #17
def _get_version(truffle_call, cwd):
    cmd = truffle_call + ["version"]
    try:
        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=cwd)
    except OSError as error:
        raise InvalidCompilation(f"Truffle failed: {error}")
    stdout, _ = process.communicate()
    stdout = stdout.decode()  # convert bytestrings to unicode strings
    if not stdout:
        raise InvalidCompilation("Truffle failed to run: 'truffle version'")
    stdout = stdout.split("\n")
    for line in stdout:
        if "Solidity" in line:
            if "native" in line:
                return solc.get_version("solc"), "solc-native"
            version = re.findall(r"\d+\.\d+\.\d+", line)[0]
            compiler = re.findall(r"(solc[a-z\-]*)", line)
            if len(compiler) > 0:
                return version, compiler[0]

    raise InvalidCompilation(f"Solidity version not found {stdout}")
Example #18
def get_version(solc: str, env: Dict[str, str]) -> str:
    """
    Get the compiler version used

    :param solc:
    :return:
    """
    cmd = [solc, "--version"]
    try:
        with subprocess.Popen(cmd,
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE,
                              env=env) as process:
            stdout_bytes, _ = process.communicate()
            stdout = stdout_bytes.decode()  # convert bytestrings to unicode strings
            version = re.findall(r"\d+\.\d+\.\d+", stdout)
            if len(version) == 0:
                raise InvalidCompilation(
                    f"Solidity version not found: {stdout}")
            return version[0]
    except OSError as error:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(error)
Example #19
def _get_version_from_config(config: Path) -> Optional[Tuple[str, str, bool]]:
    """
    :return: (version, optimized)
    """
    if not config.exists():
        raise InvalidCompilation(f"{config} not found")
    with open(config) as config_f:
        config = json.load(config_f)

    # hardhat supports multiple config files; we don't at the moment
    version = list(config["files"].values())[0]["solcConfig"]["version"]

    optimized = list(config["files"].values())[0]["solcConfig"]["settings"]["optimizer"]["enabled"]
    return "solc", version, optimized
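
Inferred from the lookups above, the hardhat cache file only needs the following shape for this parser; values are illustrative and the real cache/solidity-files-cache.json carries many more fields:

# Illustrative cache content, limited to the keys read by _get_version_from_config.
example_cache = {
    "files": {
        "contracts/Token.sol": {
            "solcConfig": {
                "version": "0.8.19",
                "settings": {"optimizer": {"enabled": True}},
            }
        }
    }
}
# For this content the function returns ("solc", "0.8.19", True).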
Example #20
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
        """Run the compilation

        Args:
            crytic_compile (CryticCompile): Associated CryticCompile object
            **kwargs: optional arguments. Used: "solc_working_dir", "solc_force_legacy_json"

        Raises:
            InvalidCompilation: If solc failed to run
        """

        solc_working_dir = kwargs.get("solc_working_dir", None)
        force_legacy_json = kwargs.get("solc_force_legacy_json", False)
        compilation_unit = CompilationUnit(crytic_compile, str(self._target))

        targets_json = _get_targets_json(compilation_unit, self._target,
                                         **kwargs)

        # there have been a couple of changes in solc starting from 0.8.x,
        if force_legacy_json and _is_at_or_above_minor_version(
                compilation_unit, 8):
            raise InvalidCompilation(
                "legacy JSON not supported from 0.8.x onwards")

        skip_filename = compilation_unit.compiler_version.version in [
            f"0.4.{x}" for x in range(0, 10)
        ]

        solc_handle_contracts(targets_json, skip_filename, compilation_unit,
                              self._target, solc_working_dir)

        if "sources" in targets_json:
            for path, info in targets_json["sources"].items():
                if skip_filename:
                    path = convert_filename(
                        self._target,
                        relative_to_short,
                        crytic_compile,
                        working_dir=solc_working_dir,
                    )
                else:
                    path = convert_filename(path,
                                            relative_to_short,
                                            crytic_compile,
                                            working_dir=solc_working_dir)
                compilation_unit.filenames.add(path)
                crytic_compile.filenames.add(path)
                compilation_unit.asts[path.absolute] = info["AST"]
Example #21
def _get_version_from_config(builder_directory: Path) -> Tuple[str, str, bool]:
    """
    :return: (version, optimized)
    """
    config = Path(builder_directory, "last-solc-config.json")
    if not config.exists():
        config = Path(builder_directory, "last-vyper-config.json")
        if not config.exists():
            raise InvalidCompilation(f"{config} not found")
        with open(config) as config_f:
            version = config_f.read()
            return "vyper", version, False
    with open(config) as config_f:
        config = json.load(config_f)

    version = config["solc"]["version"]

    optimized = "optimizer" in config["solc"] and config["solc"]["optimizer"]
    return "solc", version, optimized
Example #22
def _get_version_from_config(
        builder_directory: Path) -> Optional[Tuple[str, str, bool]]:
    """
    :return: (version, optimized)
    """
    config = Path(builder_directory, "last-solc-config.json")
    if not config.exists():
        config = Path(builder_directory, "last-vyper-config.json")
        if not config.exists():
            raise InvalidCompilation(f"{config} not found")
        with open(config) as config_f:
            version = config_f.read()
            return 'vyper', version, False
    with open(config) as config_f:
        config = json.load(config_f)

    version = config['solc']['version']

    optimized = 'optimizer' in config['solc'] and config['solc']['optimizer']
    return 'solc', version, optimized
Example #23
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param kwargs:
        :return:
        """

        solc_working_dir = kwargs.get("solc_working_dir", None)
        force_legacy_json = kwargs.get("solc_force_legacy_json", False)

        targets_json = _get_targets_json(crytic_compile, self._target, **kwargs)

        # there have been a couple of changes in solc starting from 0.8.x,
        if force_legacy_json and _is_at_or_above_minor_version(crytic_compile, 8):
            raise InvalidCompilation("legacy JSON not supported from 0.8.x onwards")

        skip_filename = crytic_compile.compiler_version.version in [
            f"0.4.{x}" for x in range(0, 10)
        ]

        _handle_contracts(
            targets_json, skip_filename, crytic_compile, self._target, solc_working_dir
        )

        if "sources" in targets_json:
            for path, info in targets_json["sources"].items():
                if skip_filename:
                    path = convert_filename(
                        self._target,
                        relative_to_short,
                        crytic_compile,
                        working_dir=solc_working_dir,
                    )
                else:
                    path = convert_filename(
                        path, relative_to_short, crytic_compile, working_dir=solc_working_dir
                    )
                crytic_compile.filenames.add(path)
                crytic_compile.asts[path.absolute] = info["AST"]
Example #24
def _run_etherlime(target: str, npx_disable: bool,
                   compile_arguments: Optional[str]) -> None:
    """Run etherlime

    Args:
        target (str): path to the target
        npx_disable (bool): true if npx should not be used
        compile_arguments (Optional[str]): additional arguments

    Raises:
        InvalidCompilation: if etherlime fails
    """
    cmd = ["etherlime", "compile", target, "deleteCompiledFiles=true"]

    if not npx_disable:
        cmd = ["npx"] + cmd

    if compile_arguments:
        cmd += compile_arguments.split(" ")

    try:
        with subprocess.Popen(
                cmd,
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                cwd=target,
                executable=shutil.which(cmd[0]),
        ) as process:
            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings

            LOGGER.info(stdout)

            if stderr:
                LOGGER.error(stderr)
    except OSError as error:
        # pylint: disable=raise-missing-from
        raise InvalidCompilation(error)
Example #25
def _load_config(config_file: str) -> Dict:
    """Load the config file

    Args:
        config_file (str): config file to load

    Raises:
        InvalidCompilation: If the config file contains "module.exports" (JavaScript configs are not supported)

    Returns:
        Dict: [description]
    """
    with open(config_file, "r", encoding="utf8") as file_desc:
        content = file_desc.read()

    if "module.exports" in content:
        raise InvalidCompilation("module.exports is not supported for waffle")
    return json.loads(content)
Example #26
def compile(crytic_compile: "CryticCompile", target: str, **kwargs: Dict):
    """
    Compile the target
    :param crytic_compile:
    :param target:
    :param kwargs:
    :return:
    """
    build_directory = Path("build", "contracts")
    brownie_ignore_compile = kwargs.get("brownie_ignore_compile", False)
    crytic_compile.type = Type.TRUFFLE

    base_cmd = ["brownie"]

    if not brownie_ignore_compile:
        cmd = base_cmd + ["compile"]

        process = subprocess.Popen(cmd,
                                   stdout=subprocess.PIPE,
                                   stderr=subprocess.PIPE,
                                   cwd=target)

        stdout_bytes, stderr_bytes = process.communicate()
        stdout, stderr = (
            stdout_bytes.decode(),
            stderr_bytes.decode(),
        )  # convert bytestrings to unicode strings

        LOGGER.info(stdout)
        if stderr:
            LOGGER.error(stderr)

    if not os.path.isdir(os.path.join(target, build_directory)):
        raise InvalidCompilation("`brownie compile` failed. Can you run it?")

    filenames = glob.glob(os.path.join(target, build_directory, "*.json"))

    _iterate_over_files(crytic_compile, target, filenames)
Example #27
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param kwargs:
        :return:
        """

        build_directory = kwargs.get("truffle_build_directory",
                                     os.path.join("build", "contracts"))
        truffle_ignore_compile = kwargs.get("truffle_ignore_compile",
                                            False) or kwargs.get(
                                                "ignore_compile", False)
        truffle_version = kwargs.get("truffle_version", None)
        # crytic_compile.type = Type.TRUFFLE
        # Truffle on windows has naming conflicts where it will invoke truffle.js directly instead
        # of truffle.cmd (unless in powershell or git bash).
        # The cleanest solution is to explicitly call
        # truffle.cmd. Reference:
        # https://truffleframework.com/docs/truffle/reference/configuration#resolving-naming-conflicts-on-windows

        truffle_overwrite_config = kwargs.get("truffle_overwrite_config",
                                              False)

        if platform.system() == "Windows":
            base_cmd = ["truffle.cmd"]
        elif kwargs.get("npx_disable", False):
            base_cmd = ["truffle"]
        else:
            base_cmd = ["npx", "truffle"]
            if truffle_version:
                if truffle_version.startswith("truffle"):
                    base_cmd = ["npx", truffle_version]
                else:
                    base_cmd = ["npx", f"truffle@{truffle_version}"]
            elif os.path.isfile(os.path.join(self._target, "package.json")):
                with open(os.path.join(self._target, "package.json"),
                          encoding="utf8") as file_desc:
                    package = json.load(file_desc)
                    if "devDependencies" in package:
                        if "truffle" in package["devDependencies"]:
                            version = package["devDependencies"]["truffle"]
                            if version.startswith("^"):
                                version = version[1:]
                            truffle_version = "truffle@{}".format(version)
                            base_cmd = ["npx", truffle_version]
                    if "dependencies" in package:
                        if "truffle" in package["dependencies"]:
                            version = package["dependencies"]["truffle"]
                            if version.startswith("^"):
                                version = version[1:]
                            truffle_version = "truffle@{}".format(version)
                            base_cmd = ["npx", truffle_version]

        if not truffle_ignore_compile:
            cmd = base_cmd + ["compile", "--all"]

            LOGGER.info(
                "'%s' running (use --truffle-version truffle@x.x.x to use specific version)",
                " ".join(cmd),
            )

            config_used = None
            config_saved = None
            if truffle_overwrite_config:
                overwritten_version = kwargs.get("truffle_overwrite_version",
                                                 None)
                # If the version is not provided, we try to guess it with the config file
                if overwritten_version is None:
                    version_from_config = _get_version_from_config(
                        self._target)
                    if version_from_config:
                        overwritten_version, _ = version_from_config

                # Save the config file, and write our temporary config
                config_used, config_saved = _save_config(Path(self._target))
                if config_used is None:
                    config_used = Path('truffle-config.js')
                _write_config(Path(self._target), config_used,
                              overwritten_version)

            process = subprocess.Popen(cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       cwd=self._target)

            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings

            if truffle_overwrite_config:
                _reload_config(Path(self._target), config_saved, config_used)

            LOGGER.info(stdout)
            if stderr:
                LOGGER.error(stderr)
        if not os.path.isdir(os.path.join(self._target, build_directory)):
            if os.path.isdir(os.path.join(self._target, "node_modules")):
                raise InvalidCompilation(
                    f"External dependencies {build_directory} {self._target} not found, please install them. (npm install)"
                )
            raise InvalidCompilation(
                "`truffle compile` failed. Can you run it?")
        filenames = glob.glob(
            os.path.join(self._target, build_directory, "*.json"))

        optimized = None

        version = None
        compiler = None

        for filename_txt in filenames:
            with open(filename_txt, encoding="utf8") as file_desc:
                target_loaded = json.load(file_desc)

                if optimized is None:
                    if "metadata" in target_loaded:
                        metadata = target_loaded["metadata"]
                        try:
                            metadata = json.loads(metadata)
                            if "settings" in metadata:
                                if "optimizer" in metadata["settings"]:
                                    if "enabled" in metadata["settings"][
                                            "optimizer"]:
                                        optimized = metadata["settings"][
                                            "optimizer"]["enabled"]
                        except json.decoder.JSONDecodeError:
                            pass

                userdoc = target_loaded.get("userdoc", {})
                devdoc = target_loaded.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)

                if "ast" not in target_loaded:
                    continue

                filename = target_loaded["ast"]["absolutePath"]
                try:
                    filename = convert_filename(filename,
                                                _relative_to_short,
                                                crytic_compile,
                                                working_dir=self._target)
                except InvalidCompilation as i:
                    txt = str(i)
                    txt += '\nConsider removing the build/contracts content (rm build/contracts/*)'
                    raise InvalidCompilation(txt)

                crytic_compile.asts[filename.absolute] = target_loaded["ast"]
                crytic_compile.filenames.add(filename)
                contract_name = target_loaded["contractName"]
                crytic_compile.natspec[contract_name] = natspec
                crytic_compile.contracts_filenames[contract_name] = filename
                crytic_compile.contracts_names.add(contract_name)
                crytic_compile.abis[contract_name] = target_loaded["abi"]
                crytic_compile.bytecodes_init[contract_name] = target_loaded[
                    "bytecode"].replace("0x", "")
                crytic_compile.bytecodes_runtime[
                    contract_name] = target_loaded["deployedBytecode"].replace(
                        "0x", "")
                crytic_compile.srcmaps_init[contract_name] = target_loaded[
                    "sourceMap"].split(";")
                crytic_compile.srcmaps_runtime[contract_name] = target_loaded[
                    "deployedSourceMap"].split(";")

                if compiler is None:
                    compiler = target_loaded.get("compiler",
                                                 {}).get("name", None)
                if version is None:
                    version = target_loaded.get("compiler",
                                                {}).get("version", None)
                    if "+" in version:
                        version = version[0:version.find("+")]

        if version is None or compiler is None:
            version_from_config = _get_version_from_config(self._target)
            if version_from_config:
                version, compiler = version_from_config
            else:
                version, compiler = _get_version(base_cmd, cwd=self._target)

        crytic_compile.compiler_version = CompilerVersion(compiler=compiler,
                                                          version=version,
                                                          optimized=optimized)
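
For the package.json branch above, a devDependencies entry like the following (illustrative) would make the compile step run "npx truffle@5.4.0 compile --all" instead of the bare "npx truffle":

# Illustrative package.json content driving the truffle version pinning above.
package_json = {
    "devDependencies": {
        "truffle": "^5.4.0",  # the leading "^" is stripped, yielding "npx truffle@5.4.0"
    }
}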
Example #28
def convert_filename(
    used_filename: Union[str, Path],
    relative_to_short,
    crytic_compile: "CryticCompile",
    working_dir=None,
) -> Filename:
    """
    Convert filename.
    The used_filename can be absolute, relative, or missing the node_modules/contracts directory.
    convert_filename returns a Filename namedtuple (absolute, relative, short, used),
    where absolute points to the absolute path and used is the original filename.

    :param used_filename:
    :param relative_to_short: lambda function
    :param crytic_compile:
    :param working_dir:
    :return: Filename (namedtuple)
    """
    filename_txt = used_filename
    if platform.system() == "Windows":
        elements = list(Path(filename_txt).parts)
        if elements[0] == "/" or elements[0] == "\\":
            elements = elements[1:]  # remove '/'
            elements[0] = elements[0] + ":/"  # add :/
        filename = Path(*elements)
    else:
        filename = Path(filename_txt)

    if working_dir is None:
        cwd = Path.cwd()
        working_dir = cwd
    else:
        working_dir = Path(working_dir)
        if working_dir.is_absolute():
            cwd = working_dir
        else:
            cwd = Path.cwd().joinpath(Path(working_dir)).resolve()

    if crytic_compile.package_name:
        try:
            filename = filename.relative_to(Path(crytic_compile.package_name))
        except ValueError:
            pass

    if not filename.exists():
        if cwd.joinpath(Path("node_modules"), filename).exists():
            filename = cwd.joinpath("node_modules", filename)
        elif cwd.joinpath(Path("contracts"), filename).exists():
            filename = cwd.joinpath("contracts", filename)
        elif working_dir.joinpath(filename).exists():
            filename = working_dir.joinpath(filename)
        else:
            raise InvalidCompilation(f"Unknown file: {filename}")
    elif not filename.is_absolute():
        filename = cwd.joinpath(filename)

    absolute = filename
    relative = Path(os.path.relpath(filename, Path.cwd()))

    # Build the short path
    try:
        if working_dir.is_absolute():
            short = absolute.relative_to(working_dir)
        else:
            short = relative.relative_to(working_dir)
    except ValueError:
        short = relative
    except RuntimeError:
        short = relative

    short = relative_to_short(short)

    return Filename(absolute=str(absolute),
                    relative=str(relative),
                    short=str(short),
                    used=used_filename)
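
A minimal usage sketch; the path is illustrative and crytic_compile_instance stands for an existing CryticCompile object:

# Hypothetical call: normalise a compiler-reported path into a Filename namedtuple.
filename = convert_filename(
    "contracts/Token.sol",        # path as reported by the compiler
    lambda path: path,            # relative_to_short: identity, for simplicity
    crytic_compile_instance,
    working_dir="my-project",
)
print(filename.absolute, filename.relative, filename.short, filename.used)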
Example #29
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param crytic_compile:
        :param kwargs:
        :return:
        """

        etherlime_ignore_compile = kwargs.get("etherlime_ignore_compile",
                                              False) or kwargs.get(
                                                  "ignore_compile", False)

        build_directory = "build"

        compile_arguments = kwargs.get("etherlime_compile_arguments", None)

        if not etherlime_ignore_compile:
            cmd = [
                "etherlime", "compile", self._target,
                "deleteCompiledFiles=true"
            ]

            if not kwargs.get("npx_disable", False):
                cmd = ["npx"] + cmd

            if compile_arguments:
                cmd += compile_arguments.split(" ")

            try:
                process = subprocess.Popen(cmd,
                                           stdout=subprocess.PIPE,
                                           stderr=subprocess.PIPE,
                                           cwd=self._target)
            except OSError as error:
                # pylint: disable=raise-missing-from
                raise InvalidCompilation(error)

            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings

            LOGGER.info(stdout)

            if stderr:
                LOGGER.error(stderr)

        # similar to truffle
        if not os.path.isdir(os.path.join(self._target, build_directory)):
            raise InvalidCompilation(
                "No truffle build directory found, did you run `truffle compile`?"
            )
        filenames = glob.glob(
            os.path.join(self._target, build_directory, "*.json"))

        version = None
        compiler = "solc-js"

        for file in filenames:
            with open(file, encoding="utf8") as file_desc:
                target_loaded = json.load(file_desc)

                if version is None:
                    if "compiler" in target_loaded:
                        if "version" in target_loaded["compiler"]:
                            version = re.findall(
                                r"\d+\.\d+\.\d+",
                                target_loaded["compiler"]["version"])[0]

                if "ast" not in target_loaded:
                    continue

                filename_txt = target_loaded["ast"]["absolutePath"]
                filename = convert_filename(filename_txt, _relative_to_short,
                                            crytic_compile)
                crytic_compile.asts[filename.absolute] = target_loaded["ast"]
                crytic_compile.filenames.add(filename)
                contract_name = target_loaded["contractName"]
                crytic_compile.contracts_filenames[contract_name] = filename
                crytic_compile.contracts_names.add(contract_name)
                crytic_compile.abis[contract_name] = target_loaded["abi"]
                crytic_compile.bytecodes_init[contract_name] = target_loaded[
                    "bytecode"].replace("0x", "")
                crytic_compile.bytecodes_runtime[
                    contract_name] = target_loaded["deployedBytecode"].replace(
                        "0x", "")
                crytic_compile.srcmaps_init[contract_name] = target_loaded[
                    "sourceMap"].split(";")
                crytic_compile.srcmaps_runtime[contract_name] = target_loaded[
                    "deployedSourceMap"].split(";")

                userdoc = target_loaded.get("userdoc", {})
                devdoc = target_loaded.get("devdoc", {})
                natspec = Natspec(userdoc, devdoc)
                crytic_compile.natspec[contract_name] = natspec

        crytic_compile.compiler_version = CompilerVersion(
            compiler=compiler,
            version=version,
            optimized=_is_optimized(compile_arguments))
Example #30
    def compile(self, crytic_compile: "CryticCompile", **kwargs: str):
        """
        Compile the target

        :param kwargs:
        :return:
        """

        hardhat_ignore_compile = kwargs.get("hardhat_ignore_compile",
                                            False) or kwargs.get(
                                                "ignore_compile", False)

        cache_directory = kwargs.get("hardhat_cache_directory", "cache")
        config_file = Path(cache_directory, "solidity-files-cache.json")

        build_directory = Path(
            kwargs.get("hardhat_cache_directory", "artifacts/build-info"))

        hardhat_working_dir = kwargs.get("hardhat_working_dir", None)

        base_cmd = ["hardhat"]
        if not kwargs.get("npx_disable", False):
            base_cmd = ["npx"] + base_cmd

        if not hardhat_ignore_compile:
            cmd = base_cmd + ["compile"]

            LOGGER.info(
                "'%s' running",
                " ".join(cmd),
            )

            process = subprocess.Popen(cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       cwd=self._target)

            stdout_bytes, stderr_bytes = process.communicate()
            stdout, stderr = (
                stdout_bytes.decode(),
                stderr_bytes.decode(),
            )  # convert bytestrings to unicode strings

            LOGGER.info(stdout)
            if stderr:
                LOGGER.error(stderr)

        (compiler, version_from_config,
         optimized) = _get_version_from_config(config_file)

        crytic_compile.compiler_version = CompilerVersion(
            compiler=compiler,
            version=version_from_config,
            optimized=optimized)

        skip_filename = crytic_compile.compiler_version.version in [
            f"0.4.{x}" for x in range(0, 10)
        ]

        files = sorted(
            os.listdir(build_directory),
            key=lambda x: os.path.getmtime(Path(build_directory, x)))
        if not files:
            txt = f"`hardhat compile` failed. Can you run it?\n{build_directory} is empty"
            raise InvalidCompilation(txt)

        build_info = Path(build_directory, files[0])
        with open(build_info, encoding="utf8") as file_desc:
            targets_json = json.load(file_desc)["output"]

            if "contracts" in targets_json:
                for original_filename, contracts_info in targets_json[
                        "contracts"].items():
                    for original_contract_name, info in contracts_info.items():
                        contract_name = extract_name(original_contract_name)

                        contract_filename = convert_filename(
                            original_filename,
                            relative_to_short,
                            crytic_compile,
                            working_dir=hardhat_working_dir,
                        )

                        crytic_compile.contracts_names.add(contract_name)
                        crytic_compile.contracts_filenames[
                            contract_name] = contract_filename

                        crytic_compile.abis[contract_name] = info["abi"]
                        crytic_compile.bytecodes_init[contract_name] = info[
                            "evm"]["bytecode"]["object"]
                        crytic_compile.bytecodes_runtime[contract_name] = info[
                            "evm"]["deployedBytecode"]["object"]
                        crytic_compile.srcmaps_init[contract_name] = info[
                            "evm"]["bytecode"]["sourceMap"].split(";")
                        # the runtime source map comes from deployedBytecode, not bytecode
                        crytic_compile.srcmaps_runtime[contract_name] = info[
                            "evm"]["deployedBytecode"]["sourceMap"].split(";")
                        userdoc = info.get("userdoc", {})
                        devdoc = info.get("devdoc", {})
                        natspec = Natspec(userdoc, devdoc)
                        crytic_compile.natspec[contract_name] = natspec

            if "sources" in targets_json:
                for path, info in targets_json["sources"].items():
                    if skip_filename:
                        path = convert_filename(
                            self._target,
                            relative_to_short,
                            crytic_compile,
                            working_dir=hardhat_working_dir,
                        )
                    else:
                        path = convert_filename(
                            path,
                            relative_to_short,
                            crytic_compile,
                            working_dir=hardhat_working_dir)
                    crytic_compile.filenames.add(path)
                    crytic_compile.asts[path.absolute] = info["ast"]