Example no. 1
def main():

    print(f"Brownie v{__version__} - Python development framework for Ethereum\n")

    if len(sys.argv) < 2 or sys.argv[1].startswith("-"):
        # this call triggers a SystemExit
        docopt(__doc__, ["brownie", "-h"])

    cmd = sys.argv[1]
    cmd_list = [i.stem for i in Path(__file__).parent.glob("[!_]*.py")]
    if cmd not in cmd_list:
        distances = sorted([(i, levenshtein_norm(cmd, i)) for i in cmd_list], key=lambda k: k[1])
        if distances[0][1] <= 0.2:
            sys.exit(f"Invalid command. Did you mean 'brownie {distances[0][0]}'?")
        sys.exit("Invalid command. Try 'brownie --help' for available commands.")

    CONFIG.argv["cli"] = cmd
    sys.modules["brownie"].a = network.accounts
    sys.modules["brownie"].__all__.append("a")

    try:
        importlib.import_module(f"brownie._cli.{cmd}").main()
    except ProjectNotFound:
        notify("ERROR", "Brownie environment has not been initiated for this folder.")
        print("Type 'brownie init' to create the file structure.")
    except Exception as e:
        print(color.format_tb(e))
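The invalid-command branch above suggests the closest known command when the normalized Levenshtein distance is at most 0.2. A minimal sketch of such a metric, for illustration only (Brownie imports its own levenshtein_norm helper, which may be implemented differently):

def _levenshtein(a: str, b: str) -> int:
    # classic dynamic-programming edit distance
    prev = list(range(len(b) + 1))
    for i, ca in enumerate(a, 1):
        curr = [i]
        for j, cb in enumerate(b, 1):
            curr.append(min(prev[j] + 1, curr[j - 1] + 1, prev[j - 1] + (ca != cb)))
        prev = curr
    return prev[-1]

def _levenshtein_norm_sketch(a: str, b: str) -> float:
    # edit distance scaled by the longer string: 0.0 means identical, 1.0 fully different
    return _levenshtein(a, b) / max(len(a), len(b), 1)

# "comple" is one edit away from "compile": 1 / 7 ~= 0.14 <= 0.2,
# so the CLI would print "Did you mean 'brownie compile'?"
assert _levenshtein_norm_sketch("comple", "compile") <= 0.2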
Example no. 2
def _password(id_):
    a = accounts.load(id_)
    a.save(id_, overwrite=True)
    notify(
        "SUCCESS",
        f"Password has been changed for account '{color('bright blue')}{id_}{color}'"
    )
Example no. 3
def main():

    print(
        f"Brownie v{__version__} - Python development framework for Ethereum\n"
    )

    # remove options before calling docopt; they are re-appended afterwards
    # so the subcommand can parse them itself
    opts = []
    if len(sys.argv) > 1 and sys.argv[1][0] != "-":
        idx = next((sys.argv.index(i) for i in sys.argv if i.startswith("-")),
                   len(sys.argv))
        opts = sys.argv[idx:]
        sys.argv = sys.argv[:idx]
    args = docopt(__doc__)
    sys.argv += opts

    cmd_list = [i.stem for i in Path(__file__).parent.glob("[!_]*.py")]
    if args["<command>"] not in cmd_list:
        sys.exit(
            "Invalid command. Try 'brownie --help' for available commands.")

    ARGV["cli"] = args["<command>"]
    sys.modules["brownie"].a = network.accounts
    sys.modules["brownie"].__all__.append("a")

    try:
        importlib.import_module(f"brownie._cli.{args['<command>']}").main()
    except ProjectNotFound:
        notify("ERROR",
               "Brownie environment has not been initiated for this folder.")
        print("Type 'brownie init' to create the file structure.")
    except Exception as e:
        print(color.format_tb(e))
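For reference, the option-stripping logic above splits sys.argv at the first flag so docopt only sees the command, then restores the flags for the subcommand (the values below are made up):

argv = ["brownie", "run", "scripts/deploy", "--network", "mainnet"]
idx = next((argv.index(i) for i in argv if i.startswith("-")), len(argv))
assert argv[:idx] == ["brownie", "run", "scripts/deploy"]  # what docopt parses
assert argv[idx:] == ["--network", "mainnet"]              # re-appended afterwards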
Example no. 4
def print_console_report(stdout_report) -> None:
    """Highlight and print a given stdout report to the console.

    This adds color formatting to the given stdout report and prints
    a summary of the vulnerabilities MythX has detected.

    :return: None
    """

    total_issues = sum(x for i in stdout_report.values() for x in i.values())
    if not total_issues:
        notify("SUCCESS", "No issues found!")
        return

    # display console report
    total_high_severity = sum(i.get("HIGH", 0) for i in stdout_report.values())
    if total_high_severity:
        notify(
            "WARNING",
            f"Found {total_issues} issues including {total_high_severity} high severity!"
        )
    else:
        print(f"Found {total_issues} issues:")
    for name in sorted(stdout_report):
        print(f"\n  contract: {color('bright magenta')}{name}{color}")
        for key in [i for i in ("HIGH", "MEDIUM", "LOW") if i in stdout_report[name]]:
            c = color("bright red" if key == "HIGH" else "bright yellow")
            print(f"    {key.title()}: {c}{stdout_report[name][key]}{color}")
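A hedged usage example; the report structure below is inferred from how the function reads it above (contract name mapped to severity counts), not from any official MythX schema:

sample_report = {
    "Token": {"HIGH": 1, "LOW": 3},   # illustrative counts only
    "SafeMath": {"MEDIUM": 2},
}
print_console_report(sample_report)
# expected: a WARNING notification (one high-severity issue among six total),
# followed by per-contract severity counts printed in color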
Example no. 5
def _release(project_path, registry_address, sender):
    network.connect("mainnet")
    with project_path.joinpath("ethpm-config.yaml").open() as fp:
        project_config = yaml.safe_load(fp)
    print("Generating manifest and pinning assets to IPFS...")
    manifest, uri = ethpm.create_manifest(project_path, project_config, True,
                                          False)
    if sender in accounts:
        account = accounts.at(sender)
    else:
        try:
            account = accounts.load(sender)
        except FileNotFoundError:
            raise UnknownAccount(f"Unknown account '{sender}'")
    name = f'{manifest["package_name"]}@{manifest["version"]}'
    print(f'Releasing {name} on "{registry_address}"...')
    try:
        tx = ethpm.release_package(registry_address, account,
                                   manifest["package_name"],
                                   manifest["version"], uri)
        if tx.status == 1:
            notify("SUCCESS", f"{name} has been released!")
            print(
                f"\nURI: {color('bright magenta')}ethpm://{registry_address}:1/{name}{color}"
            )
            return
    except Exception:
        pass
    notify(
        "ERROR",
        f'Transaction reverted when releasing {name} on "{registry_address}"')
Example no. 6
def _install(project_path, uri, replace=False):
    if replace:
        if replace.lower() not in ("true", "false"):
            raise ValueError("Invalid command for 'overwrite', must be True or False")
        replace = eval(replace.capitalize())
    print(f'Attempting to install package at "{color("bright magenta")}{uri}{color}"...')
    name = ethpm.install_package(project_path, uri, replace)
    notify("SUCCESS", f'The "{color("bright magenta")}{name}{color}" package was installed.')
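The eval-based True/False conversion above works for the validated input; an eval-free alternative (my own sketch, not part of Brownie) would be:

def _str_to_bool(value: str) -> bool:
    # accepts only "true"/"false" (any casing) and avoids eval entirely
    lowered = value.lower()
    if lowered not in ("true", "false"):
        raise ValueError("Invalid command for 'overwrite', must be True or False")
    return lowered == "true"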
Example no. 7
def _new(id_):
    pk = input("Enter the private key you wish to add: ")
    a = accounts.add(pk)
    a.save(id_)
    notify(
        "SUCCESS",
        f"A new account '{color('bright magenta')}{a.address}{color}'"
        f" has been generated with the id '{color('bright blue')}{id_}{color}'",
    )
Example no. 8
def _generate(id_):
    print("Generating a new private key...")
    a = accounts.add()
    a.save(id_)
    notify(
        "SUCCESS",
        f"A new account '{color('bright magenta')}{a.address}{color}'"
        f" has been generated with the id '{color('bright blue')}{id_}{color}'",
    )
Example no. 9
def _import(path_str, replace=False):
    if isinstance(replace, str):
        replace = eval(replace.capitalize())

    path = Path(path_str)
    with path.open() as fp:
        new_networks = yaml.safe_load(fp)

    with _get_data_folder().joinpath("network-config.yaml").open() as fp:
        old_networks = yaml.safe_load(fp)

    for value in new_networks.get("development", []):
        id_ = value["id"]
        if id_ in CONFIG.networks:
            if "cmd" not in CONFIG.networks[id_]:
                raise ValueError(
                    f"Import file contains development network with id '{id_}',"
                    " but this is already an existing live network.")
            if not replace:
                raise ValueError(f"Cannot overwrite existing network {id_}")
            old_networks["development"] = [
                i for i in old_networks["development"] if i["id"] != id_
            ]
        _validate_network(value, DEV_REQUIRED)
        old_networks["development"].append(value)

    for chain, value in [(i, x) for i in new_networks.get("live", [])
                         for x in i["networks"]]:
        prod = next(
            (i for i in old_networks["live"] if i["name"] == chain["name"]),
            None)
        if prod is None:
            prod = {"name": chain["name"], "networks": []}
            old_networks["live"].append(prod)
        id_ = value["id"]
        if id_ in CONFIG.networks:
            if not replace:
                raise ValueError(f"Cannot overwrite existing network {id_}")
            existing = next((i for i in prod["networks"] if i["id"] == id_),
                            None)
            if existing is None:
                raise ValueError(
                    f"Import file contains live network with id '{id_}',"
                    " but this is already an existing network on a different environment."
                )
            prod["networks"].remove(existing)
        _validate_network(value, PROD_REQUIRED)
        prod["networks"].append(value)

    with _get_data_folder().joinpath("network-config.yaml").open("w") as fp:
        yaml.dump(old_networks, fp)

    notify(
        "SUCCESS",
        f"Network settings imported from '{color('bright magenta')}{path}{color}'"
    )
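For reference, the parsed import file is expected to look roughly like the structure below, inferred from the code above. Values are illustrative; _validate_network enforces the full DEV_REQUIRED/PROD_REQUIRED key sets, which are not shown here.

new_networks = {
    "development": [
        {"id": "dev-fork", "name": "Local Fork", "cmd": "ganache-cli", "host": "http://127.0.0.1"},
    ],
    "live": [
        {
            "name": "Ethereum",
            "networks": [{"id": "mainnet", "host": "https://example-rpc.invalid"}],
        },
    ],
}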
Example no. 10
def _delete(package_id):
    org, repo, version = _split_id(package_id)
    source_path = _get_data_folder().joinpath(
        f"packages/{org}/{repo}@{version}")
    if not source_path.exists():
        raise FileNotFoundError(
            f"Package '{_format_pkg(org, repo, version)}' is not installed")
    shutil.rmtree(source_path)
    notify("SUCCESS",
           f"Package '{_format_pkg(org, repo, version)}' has been deleted")
Example no. 11
def _unlink(project_path, name):
    if ethpm.remove_package(project_path, name, False):
        notify(
            "SUCCESS",
            f'The "{color("bright magenta")}{name}{color}" package was unlinked.'
        )
        return
    notify(
        "ERROR",
        f'"{color("bright magenta")}{name}{color}" is not installed in this project.'
    )
Example no. 12
def _update_provider(name, url):
    with _get_data_folder().joinpath("providers-config.yaml").open() as fp:
        providers = yaml.safe_load(fp)

    providers[name] = {"host": url}

    with _get_data_folder().joinpath("providers-config.yaml").open("w") as fp:
        yaml.dump(providers, fp)

    notify(
        "SUCCESS",
        f"Provider '{color('bright magenta')}{name}{color}' has been updated")
Example no. 13
def _export(path_str):
    path = Path(path_str)
    if path.exists():
        if path.is_dir():
            path = path.joinpath("network-config.yaml")
        else:
            raise FileExistsError(f"{path} already exists")
    if not path.suffix:
        path = path.with_suffix(".yaml")
    shutil.copy(_get_data_folder().joinpath("network-config.yaml"), path)

    notify("SUCCESS", f"Network settings exported as '{color('bright magenta')}{path}{color}'")
Example no. 14
def _import(id_, path):
    source_path = Path(path).absolute()
    if not source_path.suffix:
        source_path = source_path.with_suffix(".json")
    dest_path = _get_data_folder().joinpath(f"accounts/{id_}.json")
    if dest_path.exists():
        raise FileExistsError(f"A keystore file already exists with the id '{id_}'")
    accounts.load(source_path)
    shutil.copy(source_path, dest_path)
    notify(
        "SUCCESS",
        f"Keystore '{color('bright magenta')}{source_path}{color}'"
        f" has been imported with the id '{color('bright blue')}{id_}{color}'",
    )
Example no. 15
def _export(id_, path):
    source_path = _get_data_folder().joinpath(f"accounts/{id_}.json")
    if not source_path.exists():
        raise FileNotFoundError(f"No keystore exists with the id '{id_}'")
    dest_path = Path(path).absolute()
    if not dest_path.suffix:
        dest_path = dest_path.with_suffix(".json")
    if dest_path.exists():
        raise FileExistsError(f"Export path {dest_path} already exists")
    shutil.copy(source_path, dest_path)
    notify(
        "SUCCESS",
        f"Account with id '{color('bright blue')}{id_}{color}' has been"
        f" exported to keystore '{color('bright magenta')}{dest_path}{color}'",
    )
Example no. 16
def _create(project_path, manifest_pathstr="manifest.json"):
    print("Generating a manifest based on configuration settings in ethpm-config.yaml...")
    with project_path.joinpath("ethpm-config.yaml").open() as fp:
        project_config = yaml.safe_load(fp)
    try:
        manifest = ethpm.create_manifest(project_path, project_config)[0]
    except Exception as e:
        notify("ERROR", f"{type(e).__name__}: {e}")
        print("Ensure that all package configuration settings are correct in ethpm-config.yaml")
        return
    with project_path.joinpath(manifest_pathstr).open("w") as fp:
        json.dump(manifest, fp, sort_keys=True, indent=2)
    notify(
        "SUCCESS",
        f'Generated manifest saved as "{color("bright magenta")}{manifest_pathstr}{color}"',
    )
Example no. 17
def _clone(package_id, path_str="."):
    org, repo, version = _split_id(package_id)
    source_path = _get_data_folder().joinpath(
        f"packages/{org}/{repo}@{version}")
    if not source_path.exists():
        raise FileNotFoundError(
            f"Package '{_format_pkg(org, repo, version)}' is not installed")
    dest_path = Path(path_str)
    if dest_path.exists():
        if not dest_path.is_dir():
            raise FileExistsError(f"Destination path {dest_path} already exists")
        dest_path = dest_path.joinpath(package_id)
    shutil.copytree(source_path, dest_path)
    notify(
        "SUCCESS",
        f"Package '{_format_pkg(org, repo, version)}' was cloned at {dest_path}"
    )
Example no. 18
def _delete_provider(name):
    with _get_data_folder().joinpath("providers-config.yaml").open() as fp:
        providers = yaml.safe_load(fp)

    if name not in providers:
        raise ValueError(
            f"Provider '{color('bright magenta')}{name}{color}' does not exist"
        )

    del providers[name]

    with _get_data_folder().joinpath("providers-config.yaml").open("w") as fp:
        yaml.dump(providers, fp)

    notify(
        "SUCCESS",
        f"Provider '{color('bright magenta')}{name}{color}' has been deleted")
Example no. 19
def _delete(id_):
    if id_ not in CONFIG.networks:
        raise ValueError(f"Network '{color('bright magenta')}{id_}{color}' does not exist")

    with _get_data_folder().joinpath("network-config.yaml").open() as fp:
        networks = yaml.safe_load(fp)

    if "cmd" in CONFIG.networks[id_]:
        networks["development"] = [i for i in networks["development"] if i["id"] != id_]
    else:
        target = next(i for i in networks["live"] for x in i["networks"] if x["id"] == id_)
        target["networks"] = [i for i in target["networks"] if i["id"] != id_]
        networks["live"] = [i for i in networks["live"] if i["networks"]]

    with _get_data_folder().joinpath("network-config.yaml").open("w") as fp:
        yaml.dump(networks, fp)

    notify("SUCCESS", f"Network '{color('bright magenta')}{id_}{color}' has been deleted")
Example no. 20
def _add(env, id_, *args):
    if id_ in CONFIG.networks:
        raise ValueError(
            f"Network '{color('bright magenta')}{id_}{color}' already exists")

    args = _parse_args(args)

    if "name" not in args:
        args["name"] = id_

    with _get_data_folder().joinpath("network-config.yaml").open() as fp:
        networks = yaml.safe_load(fp)
    if env.lower() == "development":
        new = {
            "name": args.pop("name"),
            "id": id_,
            "cmd": args.pop("cmd"),
            "host": args.pop("host"),
        }
        if "timeout" in args:
            new["timeout"] = args.pop("timeout")
        new["cmd_settings"] = args
        _validate_network(new, DEV_REQUIRED)
        networks["development"].append(new)
    else:
        target = next(
            (i["networks"]
             for i in networks["live"] if i["name"].lower() == env.lower()),
            None)
        if target is None:
            networks["live"].append({"name": env, "networks": []})
            target = networks["live"][-1]["networks"]
        new = {"id": id_, **args}
        _validate_network(new, PROD_REQUIRED)
        target.append(new)
    with _get_data_folder().joinpath("network-config.yaml").open("w") as fp:
        yaml.dump(networks, fp)

    notify(
        "SUCCESS",
        f"A new network '{color('bright magenta')}{new['name']}{color}' has been added"
    )
    _print_verbose_network_description(new, True)
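As an illustration of what the development branch above appends (hypothetical values): with parsed args containing name, cmd, host and one extra setting, the new entry would be:

new = {
    "name": "Local",                 # args["name"], defaults to id_ when omitted
    "id": "dev-fork",
    "cmd": "ganache-cli",
    "host": "http://127.0.0.1",
    "cmd_settings": {"port": 8545},  # whatever key=value args remain after the pops
}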
Example no. 21
def _modify(id_, *args):
    if id_ not in CONFIG.networks:
        raise ValueError(
            f"Network '{color('bright magenta')}{id_}{color}' does not exist")

    args = _parse_args(args)

    with _get_data_folder().joinpath("network-config.yaml").open() as fp:
        networks = yaml.safe_load(fp)

    is_dev = "cmd" in CONFIG.networks[id_]
    if is_dev:
        target = next(i for i in networks["development"] if i["id"] == id_)
    else:
        target = next(x for i in networks["live"] for x in i["networks"]
                      if x["id"] == id_)

    for key, value in args.items():
        t = target
        if key in DEV_CMD_SETTINGS and is_dev:
            t = target["cmd_settings"]
        if value is None:
            del t[key]
        else:
            t[key] = value
    if is_dev:
        _validate_network(target, DEV_REQUIRED)
    else:
        _validate_network(target, PROD_REQUIRED)

    if "name" not in target:
        target["name"] = id_

    with _get_data_folder().joinpath("network-config.yaml").open("w") as fp:
        yaml.dump(networks, fp)

    notify(
        "SUCCESS",
        f"Network '{color('bright magenta')}{target['name']}{color}' has been modified"
    )
    _print_verbose_network_description(target, True)
Example no. 22
def _list(project_path):
    installed, modified = ethpm.get_installed_packages(project_path)
    package_list = sorted(installed + modified)
    if not package_list:
        print("No packages are currently installed in this project.")
        return
    if modified:
        notify(
            "WARNING",
            f"One or more files in {len(modified)} packages have been modified since installation.",
        )
        print("Unlink or reinstall them to silence this warning.")
        print(f"Modified package names are highlighted in {color('bright blue')}blue{color}.\n")
    print(f"Found {color('bright magenta')}{len(package_list)}{color} installed packages:")
    for name in package_list:
        u = "\u2514" if name == package_list[-1] else "\u251c"
        c = color("bright blue") if name in modified else color("bright white")
        print(
            f" {color('bright black')}{u}\u2500{c}{name[0]}{color}@"
            f"{color('bright white')}{name[1]}{color}"
        )
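The name[0]/name[1] subscripting above suggests that installed and modified are lists of (package_name, version) pairs; a hedged illustration of the assumed shape:

installed = [("openzeppelin", "3.0.0")]   # assumed (name, version) tuples
modified = []
# with this input, _list would print something like " └─openzeppelin@3.0.0"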
Example no. 23
def _install(uri):
    package_id = project.main.install_package(uri)
    org, repo, version = _split_id(package_id)
    notify("SUCCESS",
           f"Package '{_format_pkg(org, repo, version)}' has been installed")
Example no. 24
def main():
    args = docopt(__doc__)
    path = project.new(args["<path>"] or ".", args["--force"])
    notify("SUCCESS", f"Brownie environment has been initiated at {path}")
Example no. 25
def main():
    args = docopt(__doc__)
    _update_argv_from_docopt(args)

    project_path = project.check_for_project(".")
    if project_path is None:
        raise ProjectNotFound

    build = project.load()._build

    print("Preparing project data for submission to MythX...")
    contracts, libraries = get_contract_types(build)

    job_data = assemble_contract_jobs(build, contracts)
    job_data = update_contract_jobs_with_dependencies(build, contracts,
                                                      libraries, job_data)

    client, authenticated = get_mythx_client()

    job_uuids = send_to_mythx(job_data, client, authenticated)

    # exit if user wants an async analysis run
    if ARGV["async"] and authenticated:
        print(
            "\nAll contracts were submitted successfully. Check the dashboard at "
            "https://dashboard.mythx.io/ for the progress and results of your analyses"
        )
        return

    print("\nWaiting for results...")
    wait_for_jobs(job_uuids, client)

    # assemble report json
    source_to_name = get_contract_locations(build)
    highlight_report = {"highlights": {"MythX": {}}}
    stdout_report = {}
    for c, uuid in enumerate(job_uuids, start=1):
        print(
            f"Generating report for job {color['value']}{uuid}{color} ({c}/{len(job_uuids)})"
        )
        if authenticated:
            print("You can also check the results at {}{}\n".format(
                DASHBOARD_BASE_URL, uuid))

        update_report(client, uuid, highlight_report, stdout_report,
                      source_to_name)

    # erase previous report
    report_path = Path("reports/security.json")
    if report_path.exists():
        report_path.unlink()

    total_issues = sum(x for i in stdout_report.values() for x in i.values())
    if not total_issues:
        notify("SUCCESS", "No issues found!")
        return

    # display console report
    total_high_severity = sum(i.get("HIGH", 0) for i in stdout_report.values())
    if total_high_severity:
        notify(
            "WARNING",
            f"Found {total_issues} issues including {total_high_severity} high severity!"
        )
    else:
        print(f"Found {total_issues} issues:")
    for name in sorted(stdout_report):
        print(f"\n  contract: {color['contract']}{name}{color}")
        for key in [i for i in ("HIGH", "MEDIUM", "LOW") if i in stdout_report[name]]:
            c = color("bright " + SEVERITY_COLOURS[key])
            print(f"    {key.title()}: {c}{stdout_report[name][key]}{color}")

    # Write report to Brownie directory
    with report_path.open("w+") as fp:
        json.dump(highlight_report, fp, indent=2, sort_keys=True)

    # Launch GUI if user requested it
    if ARGV["gui"]:
        print("Launching the Brownie GUI")
        Gui = importlib.import_module("brownie._gui").Gui
        Gui().mainloop()
Example no. 26
def main():
    args = docopt(__doc__)
    path = project.new(args["<path>"] or ".", args["--force"], args["--force"])
    notify("SUCCESS", f"A new Brownie project has been initialized at {path}")
Example no. 27
def _delete(id_):
    path = _get_data_folder().joinpath(f"accounts/{id_}.json")
    path.unlink()
    notify("SUCCESS",
           f"Account '{color('bright blue')}{id_}{color}' has been deleted")
Example no. 28
def _install_from_github(package_id: str) -> str:
    try:
        path, version = package_id.split("@")
        org, repo = path.split("/")
    except ValueError:
        raise ValueError(
            "Invalid package ID. Must be given as [ORG]/[REPO]@[VERSION]"
            "\ne.g. 'OpenZeppelin/[email protected]'") from None

    base_install_path = _get_data_folder().joinpath("packages")
    install_path = base_install_path.joinpath(f"{org}")
    install_path.mkdir(exist_ok=True)
    install_path = install_path.joinpath(f"{repo}@{version}")
    if install_path.exists():
        raise FileExistsError("Package is already installed")

    headers = REQUEST_HEADERS.copy()
    if os.getenv("GITHUB_TOKEN"):
        auth = b64encode(os.environ["GITHUB_TOKEN"].encode()).decode()
        headers.update({"Authorization": "Basic {}".format(auth)})

    response = requests.get(
        f"https://api.github.com/repos/{org}/{repo}/tags?per_page=100",
        headers=headers)
    if response.status_code != 200:
        msg = "Status {} when getting package versions from Github: '{}'".format(
            response.status_code,
            response.json()["message"])
        if response.status_code == 403:
            msg += (
                "\n\nIf this issue persists, generate a Github API token and store"
                " it as the environment variable `GITHUB_TOKEN`:\n"
                "https://github.blog/2013-05-16-personal-api-tokens/")
        raise ConnectionError(msg)

    data = response.json()
    if not data:
        raise ValueError("Github repository has no tags set")
    org, repo = data[0]["zipball_url"].split("/")[3:5]
    tags = [i["name"].lstrip("v") for i in data]
    if version not in tags:
        raise ValueError(
            "Invalid version for this package. Available versions are:\n" +
            ", ".join(tags)) from None

    download_url = next(i["zipball_url"] for i in data
                        if i["name"].lstrip("v") == version)

    existing = list(install_path.parent.iterdir())
    _stream_download(download_url, str(install_path.parent))

    installed = next(i for i in install_path.parent.iterdir()
                     if i not in existing)
    shutil.move(installed, install_path)

    try:
        if not install_path.joinpath("brownie-config.yaml").exists():
            brownie_config: Dict = {"project_structure": {}}

            contract_paths = set(
                i.relative_to(install_path).parts[0]
                for i in install_path.glob("**/*.sol"))
            contract_paths.update(
                i.relative_to(install_path).parts[0]
                for i in install_path.glob("**/*.vy"))
            if not contract_paths:
                raise InvalidPackage(
                    f"{package_id} does not contain any .sol or .vy files")
            if install_path.joinpath("contracts").is_dir():
                brownie_config["project_structure"]["contracts"] = "contracts"
            elif len(contract_paths) == 1:
                brownie_config["project_structure"][
                    "contracts"] = contract_paths.pop()
            else:
                raise InvalidPackage(
                    f"{package_id} has no `contracts/` subdirectory, and "
                    "multiple directories containing source files")

            with install_path.joinpath("brownie-config.yaml").open("w") as fp:
                yaml.dump(brownie_config, fp)

        project = load(install_path)
        project.close()
    except InvalidPackage:
        shutil.rmtree(install_path)
        raise
    except Exception as e:
        notify(
            "WARNING",
            f"Unable to compile {package_id} due to a {type(e).__name__} - you may still be able to"
            " import sources from the package, but will be unable to load the package directly.\n",
        )

    return f"{org}/{repo}@{version}"
Example no. 29
def generate_build_json(input_json: Dict,
                        output_json: Dict,
                        compiler_data: Optional[Dict] = None,
                        silent: bool = True) -> Dict:
    """Formats standard compiler output to the brownie build json.

    Args:
        input_json: solc input json used to compile
        output_json: output json returned by compiler
        compiler_data: additional data to include under 'compiler' in build json
        silent: suppress verbose reporting

    Returns: build json dict"""

    if input_json["language"] not in ("Solidity", "Vyper"):
        raise UnsupportedLanguage(f"{input_json['language']}")

    if not silent:
        print("Generating build data...")

    if compiler_data is None:
        compiler_data = {}
    compiler_data["evm_version"] = input_json["settings"]["evmVersion"]
    build_json = {}
    path_list = list(input_json["sources"])

    if input_json["language"] == "Solidity":
        compiler_data["optimizer"] = input_json["settings"]["optimizer"]
        source_nodes, statement_nodes, branch_nodes = solidity._get_nodes(
            output_json)

    for path_str, contract_name in [
        (k, v) for k in path_list for v in output_json["contracts"].get(k, {})
    ]:

        if not silent:
            print(f" - {contract_name}...")

        abi = output_json["contracts"][path_str][contract_name]["abi"]
        natspec = merge_natspec(
            output_json["contracts"][path_str][contract_name].get(
                "devdoc", {}),
            output_json["contracts"][path_str][contract_name].get(
                "userdoc", {}),
        )
        output_evm = output_json["contracts"][path_str][contract_name]["evm"]

        if input_json["language"] == "Solidity":
            contract_node = next(i[contract_name] for i in source_nodes
                                 if i.absolutePath == path_str)
            build_json[contract_name] = solidity._get_unique_build_json(
                output_evm,
                contract_node,
                statement_nodes,
                branch_nodes,
                next((True for i in abi if i["type"] == "fallback"), False),
            )

        else:
            if contract_name == "<stdin>":
                contract_name = "Vyper"
            build_json[contract_name] = vyper._get_unique_build_json(
                output_evm,
                path_str,
                contract_name,
                output_json["sources"][path_str]["ast"],
                (0, len(input_json["sources"][path_str]["content"])),
            )

        build_json[contract_name].update({
            "abi": abi,
            "ast": output_json["sources"][path_str]["ast"],
            "compiler": compiler_data,
            "contractName": contract_name,
            "deployedBytecode": output_evm["deployedBytecode"]["object"],
            "deployedSourceMap": output_evm["deployedBytecode"]["sourceMap"],
            "language": input_json["language"],
            "natspec": natspec,
            "opcodes": output_evm["deployedBytecode"]["opcodes"],
            "sha1": sha1(input_json["sources"][path_str]["content"].encode()).hexdigest(),
            "source": input_json["sources"][path_str]["content"],
            "sourceMap": output_evm["bytecode"].get("sourceMap", ""),
            "sourcePath": path_str,
        })
        size = len(remove_0x_prefix(
            output_evm["deployedBytecode"]["object"])) / 2  # type: ignore
        if size > 24577:
            notify(
                "WARNING",
                f"deployed size of {contract_name} is {size} bytes, exceeds EIP-170 limit of 24577",
            )

    if not silent:
        print("")

    return build_json
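For reference, each per-contract entry in the returned build json carries the keys below (set in the update call above), in addition to the language-specific fields produced by solidity._get_unique_build_json / vyper._get_unique_build_json, which are not shown here:

COMMON_BUILD_KEYS = (
    "abi", "ast", "compiler", "contractName", "deployedBytecode",
    "deployedSourceMap", "language", "natspec", "opcodes", "sha1",
    "source", "sourceMap", "sourcePath",
)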
Example no. 30
def _install_from_github(package_id: str) -> str:
    try:
        path, version = package_id.split("@")
        org, repo = path.split("/")
    except ValueError:
        raise ValueError(
            "Invalid package ID. Must be given as [ORG]/[REPO]@[VERSION]"
            "\ne.g. 'OpenZeppelin/[email protected]'") from None

    base_install_path = _get_data_folder().joinpath("packages")
    install_path = base_install_path.joinpath(f"{org}")
    install_path.mkdir(exist_ok=True)
    install_path = install_path.joinpath(f"{repo}@{version}")
    if install_path.exists():
        raise FileExistsError("Package is already installed")

    headers = REQUEST_HEADERS.copy()
    headers.update(_maybe_retrieve_github_auth())

    if re.match(r"^[0-9a-f]+$", version):
        download_url = f"https://api.github.com/repos/{org}/{repo}/zipball/{version}"
    else:
        download_url = _get_download_url_from_tag(org, repo, version, headers)

    existing = list(install_path.parent.iterdir())
    _stream_download(download_url, str(install_path.parent), headers)

    installed = next(i for i in install_path.parent.iterdir()
                     if i not in existing)
    shutil.move(installed, install_path)

    try:
        if not install_path.joinpath("brownie-config.yaml").exists():
            brownie_config: Dict = {"project_structure": {}}

            contract_paths = set(
                i.relative_to(install_path).parts[0]
                for i in install_path.glob("**/*.sol"))
            contract_paths.update(
                i.relative_to(install_path).parts[0]
                for i in install_path.glob("**/*.vy"))
            if not contract_paths:
                raise InvalidPackage(
                    f"{package_id} does not contain any .sol or .vy files")
            if install_path.joinpath("contracts").is_dir():
                brownie_config["project_structure"]["contracts"] = "contracts"
            elif len(contract_paths) == 1:
                brownie_config["project_structure"][
                    "contracts"] = contract_paths.pop()
            else:
                raise InvalidPackage(
                    f"{package_id} has no `contracts/` subdirectory, and "
                    "multiple directories containing source files")

            with install_path.joinpath("brownie-config.yaml").open("w") as fp:
                yaml.dump(brownie_config, fp)

        project = load(install_path)
        project.close()
    except InvalidPackage:
        shutil.rmtree(install_path)
        raise
    except Exception as e:
        notify(
            "WARNING",
            f"Unable to compile {package_id} due to a {type(e).__name__} - you may still be able to"
            " import sources from the package, but will be unable to load the package directly.\n",
        )

    return f"{org}/{repo}@{version}"