Example #1
    def load(self) -> None:
        """Compiles the project contracts, creates ContractContainer objects and
        populates the namespace."""
        if self._active:
            raise ProjectAlreadyLoaded("Project is already active")

        contract_sources = _load_sources(self._path, "contracts", False)
        interface_sources = _load_sources(self._path, "interfaces", True)
        self._sources = Sources(contract_sources, interface_sources)
        self._build = Build(self._sources)

        contract_list = self._sources.get_contract_list()
        for path in list(self._path.glob("build/contracts/*.json")):
            try:
                with path.open() as fp:
                    build_json = json.load(fp)
            except json.JSONDecodeError:
                build_json = {}
            if not set(BUILD_KEYS).issubset(
                    build_json) or path.stem not in contract_list:
                path.unlink()
                continue
            if isinstance(build_json["allSourcePaths"], list):
                # this handles the format change in v1.7.0; it can be removed in a future release
                path.unlink()
                test_path = self._path.joinpath("build/tests.json")
                if test_path.exists():
                    test_path.unlink()
                continue
            self._build._add(build_json)

        self._compiler_config = _load_project_compiler_config(self._path)

        # compile updated sources, update build
        changed = self._get_changed_contracts()
        self._compile(changed, self._compiler_config, False)
        self._save_interface_hashes()
        self._create_containers()
        self._load_deployments()

        # add project to namespaces, apply import blackmagic
        name = self._name
        self.__all__ = list(self._containers)
        sys.modules[f"brownie.project.{name}"] = self  # type: ignore
        sys.modules["brownie.project"].__dict__[name] = self
        sys.modules["brownie.project"].__all__.append(name)  # type: ignore
        sys.modules["brownie.project"].__console_dir__.append(
            name)  # type: ignore
        self._namespaces = [
            sys.modules["__main__"].__dict__,
            sys.modules["brownie.project"].__dict__,
        ]
        self._active = True
        _loaded_projects.append(self)
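
For orientation, a minimal sketch of how the namespace injection performed at the end of load() is usually consumed; the project directory "." and the name "MyProject" are illustrative assumptions, not values taken from the code above.

from brownie import project

# project.load() constructs a Project and runs the load() method shown above;
# afterwards the project is reachable as brownie.project.MyProject and its
# ContractContainer objects are listed in __all__
proj = project.load(".", name="MyProject")  # assumed path and project name
print(proj.__all__)                         # names of the generated containers
proj.close()                                # undoes the namespace "blackmagic"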
Example #2
    def __init__(self, name: str, contract_sources: Dict, compiler_config: Dict) -> None:
        self._path = None
        self._name = name
        self._sources = Sources(contract_sources, {})
        self._build = Build(self._sources)
        self._compile(contract_sources, compiler_config, True)
        self._create_containers()
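
This constructor follows the pattern of an in-memory project that has no _path on disk and compiles immediately; a hedged sketch of how such an object is typically produced via brownie's public API (the Solidity source string is illustrative):

from brownie import compile_source

# compile_source() wraps a source string in a temporary, path-less project
# and returns it; each compiled contract is exposed as an attribute
temp_project = compile_source("pragma solidity ^0.8.13; contract Foo {}")
foo_container = temp_project.Foo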
Example #3
class Project(_ProjectBase):
    """
    Top level dict-like container that holds data and objects related to
    a brownie project.

    Attributes:
        _path: Path object, absolute path to the project
        _name: Name that the project is loaded as
        _sources: project Source object
        _build: project Build object
    """
    def __init__(self, name: str, project_path: Path) -> None:
        self._path: Path = project_path
        self._envvars = _load_project_envvars(project_path)
        self._structure = expand_posix_vars(
            _load_project_structure_config(project_path), self._envvars)
        self._build_path: Path = project_path.joinpath(
            self._structure["build"])

        self._name = name
        self._active = False
        self.load()

    def load(self) -> None:
        """Compiles the project contracts, creates ContractContainer objects and
        populates the namespace."""
        if self._active:
            raise ProjectAlreadyLoaded("Project is already active")

        contract_sources = _load_sources(self._path,
                                         self._structure["contracts"], False)
        interface_sources = _load_sources(self._path,
                                          self._structure["interfaces"], True)
        self._sources = Sources(contract_sources, interface_sources)
        self._build = Build(self._sources)

        contract_list = self._sources.get_contract_list()
        for path in list(self._build_path.glob("contracts/*.json")):
            try:
                with path.open() as fp:
                    build_json = json.load(fp)
            except json.JSONDecodeError:
                build_json = {}
            if not set(BUILD_KEYS).issubset(
                    build_json) or path.stem not in contract_list:
                path.unlink()
                continue
            if isinstance(build_json["allSourcePaths"], list):
                # this handles the format change in v1.7.0; it can be removed in a future release
                path.unlink()
                test_path = self._build_path.joinpath("tests.json")
                if test_path.exists():
                    test_path.unlink()
                continue
            if not self._path.joinpath(build_json["sourcePath"]).exists():
                path.unlink()
                continue
            self._build._add_contract(build_json)

        interface_hashes = {}
        interface_list = self._sources.get_interface_list()
        for path in list(self._build_path.glob("interfaces/*.json")):
            try:
                with path.open() as fp:
                    build_json = json.load(fp)
            except json.JSONDecodeError:
                build_json = {}
            if not set(INTERFACE_KEYS).issubset(
                    build_json) or path.stem not in interface_list:
                path.unlink()
                continue
            self._build._add_interface(build_json)
            interface_hashes[path.stem] = build_json["sha1"]

        self._compiler_config = expand_posix_vars(
            _load_project_compiler_config(self._path), self._envvars)

        # compile updated sources, update build
        changed = self._get_changed_contracts(interface_hashes)
        self._compile(changed, self._compiler_config, False)
        self._compile_interfaces(interface_hashes)
        self._load_dependency_artifacts()

        self._create_containers()
        self._load_deployments()

        # add project to namespaces, apply import blackmagic
        name = self._name
        self.__all__ = list(self._containers) + ["interface"]
        sys.modules[f"brownie.project.{name}"] = self  # type: ignore
        sys.modules["brownie.project"].__dict__[name] = self
        sys.modules["brownie.project"].__all__.append(name)  # type: ignore
        sys.modules["brownie.project"].__console_dir__.append(
            name)  # type: ignore
        self._namespaces = [
            sys.modules["__main__"].__dict__,
            sys.modules["brownie.project"].__dict__,
        ]

        # register project for revert and reset
        _revert_register(self)

        self._active = True
        _loaded_projects.append(self)

    def _get_changed_contracts(self, compiled_hashes: Dict) -> Dict:
        # get list of changed interfaces and contracts
        new_hashes = self._sources.get_interface_hashes()
        # remove outdated build artifacts
        for name in [
                k for k, v in new_hashes.items()
                if compiled_hashes.get(k, None) != v
        ]:
            self._build._remove_interface(name)

        contracts = set(i for i in self._sources.get_contract_list()
                        if self._compare_build_json(i))
        for contract_name in list(contracts):
            contracts.update(self._build.get_dependents(contract_name))

        # remove outdated build artifacts
        for name in contracts:
            self._build._remove_contract(name)

        # get final list of changed source paths
        changed_set: Set = set(
            self._sources.get_source_path(i) for i in contracts)
        return {i: self._sources.get(i) for i in changed_set}

    def _compare_build_json(self, contract_name: str) -> bool:
        config = self._compiler_config
        # confirm that this contract was previously compiled
        try:
            source = self._sources.get(contract_name)
            build_json = self._build.get(contract_name)
        except KeyError:
            return True
        # compare source hashes
        if build_json["sha1"] != sha1(source.encode()).hexdigest():
            return True
        # compare compiler settings
        if _compare_settings(config, build_json["compiler"]):
            return True
        if build_json["language"] == "Solidity":
            # compare solc-specific compiler settings
            solc_config = config["solc"].copy()
            solc_config["remappings"] = None
            if _compare_settings(solc_config, build_json["compiler"]):
                return True
            # compare solc pragma against compiled version
            if Version(build_json["compiler"]
                       ["version"]) not in get_pragma_spec(source):
                return True
        return False

    def _compile_interfaces(self, compiled_hashes: Dict) -> None:
        new_hashes = self._sources.get_interface_hashes()
        changed_paths = [
            self._sources.get_source_path(k, True)
            for k, v in new_hashes.items() if compiled_hashes.get(k, None) != v
        ]
        if not changed_paths:
            return

        print("Generating interface ABIs...")
        changed_sources = {i: self._sources.get(i) for i in changed_paths}
        abi_json = compiler.get_abi(
            changed_sources,
            allow_paths=self._path.as_posix(),
            remappings=self._compiler_config["solc"].get("remappings", []),
        )

        for name, abi in abi_json.items():

            with self._build_path.joinpath(f"interfaces/{name}.json").open(
                    "w") as fp:
                json.dump(abi, fp, sort_keys=True, indent=2, default=sorted)
            self._build._add_interface(abi)

    def _load_dependency_artifacts(self) -> None:
        dep_build_path = self._build_path.joinpath("contracts/dependencies/")
        for path in list(dep_build_path.glob("**/*.json")):
            contract_alias = path.relative_to(dep_build_path).with_suffix(
                "").as_posix()
            if self._build.get_dependents(contract_alias):
                with path.open() as fp:
                    build_json = json.load(fp)
                self._build._add_contract(build_json, contract_alias)
            else:
                path.unlink()

    def _load_deployments(self) -> None:
        if CONFIG.network_type != "live" and not CONFIG.settings[
                "dev_deployment_artifacts"]:
            return
        chainid = CONFIG.active_network[
            "chainid"] if CONFIG.network_type == "live" else "dev"
        path = self._build_path.joinpath(f"deployments/{chainid}")
        path.mkdir(exist_ok=True)
        deployments = list(path.glob("*.json"))
        deployments.sort(key=lambda k: k.stat().st_mtime)
        deployment_map = self._load_deployment_map()
        for build_json in deployments:
            with build_json.open() as fp:
                build = json.load(fp)

            contract_name = build["contractName"]
            if contract_name not in self._containers:
                build_json.unlink()
                continue
            if "pcMap" in build:
                contract = ProjectContract(self, build, build_json.stem)
            else:
                contract = Contract.from_abi(  # type: ignore
                    contract_name, build_json.stem, build["abi"])
                contract._project = self
            container = self._containers[contract_name]
            _add_contract(contract)
            container._contracts.append(contract)

            # update deployment map for the current chain
            instances = deployment_map.setdefault(chainid, {}).setdefault(
                contract_name, [])
            if build_json.stem in instances:
                instances.remove(build_json.stem)
            instances.insert(0, build_json.stem)

        self._save_deployment_map(deployment_map)

    def _load_deployment_map(self) -> Dict:
        deployment_map: Dict = {}
        map_path = self._build_path.joinpath("deployments/map.json")
        if map_path.exists():
            with map_path.open("r") as fp:
                deployment_map = json.load(fp)
        return deployment_map

    def _save_deployment_map(self, deployment_map: Dict) -> None:
        with self._build_path.joinpath("deployments/map.json").open("w") as fp:
            json.dump(deployment_map,
                      fp,
                      sort_keys=True,
                      indent=2,
                      default=sorted)

    def _remove_from_deployment_map(self, contract: ProjectContract) -> None:
        if CONFIG.network_type != "live" and not CONFIG.settings[
                "dev_deployment_artifacts"]:
            return
        chainid = CONFIG.active_network[
            "chainid"] if CONFIG.network_type == "live" else "dev"
        deployment_map = self._load_deployment_map()
        try:
            deployment_map[chainid][contract._name].remove(contract.address)
            if not deployment_map[chainid][contract._name]:
                del deployment_map[chainid][contract._name]
            if not deployment_map[chainid]:
                del deployment_map[chainid]
        except (KeyError, ValueError):
            pass

        self._save_deployment_map(deployment_map)

    def _add_to_deployment_map(self, contract: ProjectContract) -> None:
        if CONFIG.network_type != "live" and not CONFIG.settings[
                "dev_deployment_artifacts"]:
            return

        chainid = CONFIG.active_network[
            "chainid"] if CONFIG.network_type == "live" else "dev"
        deployment_map = self._load_deployment_map()
        try:
            deployment_map[chainid][contract._name].remove(contract.address)
        except (ValueError, KeyError):
            pass
        deployment_map.setdefault(chainid,
                                  {}).setdefault(contract._name, []).insert(
                                      0, contract.address)
        self._save_deployment_map(deployment_map)

    def _update_and_register(self, dict_: Any) -> None:
        dict_.update(self)
        if "interface" not in dict_:
            dict_["interface"] = self.interface
        self._namespaces.append(dict_)

    def _add_to_main_namespace(self) -> None:
        # temporarily adds project objects to the main namespace
        brownie: Any = sys.modules["brownie"]
        if "interface" not in brownie.__dict__:
            brownie.__dict__["interface"] = self.interface
        brownie.__dict__.update(self._containers)
        brownie.__all__.extend(self.__all__)

    def _remove_from_main_namespace(self) -> None:
        # removes project objects from the main namespace
        brownie: Any = sys.modules["brownie"]
        if brownie.__dict__.get("interface") == self.interface:
            del brownie.__dict__["interface"]
        for key in self._containers:
            brownie.__dict__.pop(key, None)
        for key in self.__all__:
            if key in brownie.__all__:
                brownie.__all__.remove(key)

    def __repr__(self) -> str:
        return f"<Project '{self._name}'>"

    def load_config(self) -> None:
        """Loads the project config file settings"""
        if isinstance(self._path, Path):
            _load_project_config(self._path)

    def close(self, raises: bool = True) -> None:
        """Removes pointers to the project's ContractContainer objects and this object."""
        if not self._active:
            if not raises:
                return
            raise ProjectNotFound("Project is not currently loaded.")

        # remove objects from namespace
        for dict_ in self._namespaces:
            for key in [
                k for k, v in dict_.items()
                if v == self or (k in self and v == self[k])  # type: ignore
            ]:
                del dict_[key]

        # remove contracts
        for contract in [
                x for v in self._containers.values() for x in v._contracts
        ]:
            _remove_contract(contract)
        for container in self._containers.values():
            container._contracts.clear()
        self._containers.clear()

        # undo black-magic
        self._remove_from_main_namespace()
        name = self._name
        del sys.modules[f"brownie.project.{name}"]
        sys.modules["brownie.project"].__all__.remove(name)  # type: ignore
        sys.modules["brownie.project"].__console_dir__.remove(
            name)  # type: ignore
        self._active = False
        _loaded_projects.remove(self)

        # clear paths
        try:
            sys.path.remove(str(self._path))
        except ValueError:
            pass

    def _clear_dev_deployments(self, height: int) -> None:
        path = self._build_path.joinpath("deployments/dev")
        if path.exists():
            deployment_map = self._load_deployment_map()
            for deployment in path.glob("*.json"):
                if height == 0:
                    deployment.unlink()
                else:
                    with deployment.open("r") as fp:
                        deployment_artifact = json.load(fp)
                    block_height = deployment_artifact["deployment"][
                        "blockHeight"]
                    address = deployment_artifact["deployment"]["address"]
                    contract_name = deployment_artifact["contractName"]
                    if block_height > height:
                        deployment.unlink()
                        try:
                            deployment_map["dev"][contract_name].remove(
                                address)
                        except (KeyError, ValueError):
                            pass
            if "dev" in deployment_map and (height == 0
                                            or not deployment_map["dev"]):
                del deployment_map["dev"]
                shutil.rmtree(path)

            self._save_deployment_map(deployment_map)

    def _revert(self, height: int) -> None:
        self._clear_dev_deployments(height)

    def _reset(self) -> None:
        self._clear_dev_deployments(0)
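
As a reading aid for the deployment-map helpers above, a hedged sketch of the deployments/map.json structure that _load_deployment_map() and _save_deployment_map() exchange; every key and value shown is an illustrative assumption:

# chain id ("dev" on development networks) maps to contract names, each with a
# list of deployed addresses; _add_to_deployment_map() inserts the newest at index 0
deployment_map = {
    "1": {
        "Token": [
            "<address of newest Token deployment>",   # placeholder, illustrative only
            "<address of an older Token deployment>",
        ],
    },
}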
Example #4
    def load(self, raise_if_loaded: bool = True) -> None:
        """Compiles the project contracts, creates ContractContainer objects and
        populates the namespace."""
        if self._active:
            if raise_if_loaded:
                raise ProjectAlreadyLoaded("Project is already active")
            return None

        contract_sources = _load_sources(self._path,
                                         self._structure["contracts"], False)
        interface_sources = _load_sources(self._path,
                                          self._structure["interfaces"], True)
        self._sources = Sources(contract_sources, interface_sources)
        self._build = Build(self._sources)

        contract_list = self._sources.get_contract_list()
        for path in list(self._build_path.glob("contracts/*.json")):
            try:
                with path.open() as fp:
                    build_json = json.load(fp)
            except json.JSONDecodeError:
                build_json = {}
            if not set(BUILD_KEYS).issubset(
                    build_json) or path.stem not in contract_list:
                path.unlink()
                continue
            if isinstance(build_json["allSourcePaths"], list):
                # this handles the format change in v1.7.0; it can be removed in a future release
                path.unlink()
                test_path = self._build_path.joinpath("tests.json")
                if test_path.exists():
                    test_path.unlink()
                continue
            if not self._path.joinpath(build_json["sourcePath"]).exists():
                path.unlink()
                continue
            self._build._add_contract(build_json)

        interface_hashes = {}
        interface_list = self._sources.get_interface_list()
        for path in list(self._build_path.glob("interfaces/*.json")):
            try:
                with path.open() as fp:
                    build_json = json.load(fp)
            except json.JSONDecodeError:
                build_json = {}
            if not set(INTERFACE_KEYS).issubset(
                    build_json) or path.stem not in interface_list:
                path.unlink()
                continue
            self._build._add_interface(build_json)
            interface_hashes[path.stem] = build_json["sha1"]

        self._compiler_config = expand_posix_vars(
            _load_project_compiler_config(self._path), self._envvars)

        # compile updated sources, update build
        changed = self._get_changed_contracts(interface_hashes)
        self._compile(changed, self._compiler_config, False)
        self._compile_interfaces(interface_hashes)
        self._load_dependency_artifacts()

        self._create_containers()
        self._load_deployments()

        # add project to namespaces, apply import blackmagic
        name = self._name
        self.__all__ = list(self._containers) + ["interface"]
        sys.modules[f"brownie.project.{name}"] = self  # type: ignore
        sys.modules["brownie.project"].__dict__[name] = self
        sys.modules["brownie.project"].__all__.append(name)  # type: ignore
        sys.modules["brownie.project"].__console_dir__.append(
            name)  # type: ignore
        self._namespaces = [
            sys.modules["__main__"].__dict__,
            sys.modules["brownie.project"].__dict__,
        ]

        # register project for revert and reset
        _revert_register(self)

        self._active = True
        _loaded_projects.append(self)
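
The only behavioural difference from Example #3 is the raise_if_loaded flag, which turns a repeated load into a no-op; a minimal sketch under the same assumed project path and name as before:

from brownie import project

proj = project.load(".", name="MyProject")  # assumed path and project name
proj.load(raise_if_loaded=False)            # returns None instead of raising
# a plain proj.load() here would raise ProjectAlreadyLoaded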
Example #5
    def __init__(self, name: str, contract_sources: Dict, project_path: Optional[Path]) -> None:
        self._path = project_path
        self._name = name
        self._sources = Sources(contract_sources)
        self._build = Build(self._sources)
Example #6
class Project(_ProjectBase):
    """
    Top level dict-like container that holds data and objects related to
    a brownie project.

    Attributes:
        _path: Path object, absolute path to the project
        _name: Name that the project is loaded as
        _sources: project Source object
        _build: project Build object
    """
    def __init__(self, name: str, project_path: Path) -> None:
        self._path: Path = project_path
        self._name = name
        self._active = False
        self.load()

    def load(self) -> None:
        """Compiles the project contracts, creates ContractContainer objects and
        populates the namespace."""
        if self._active:
            raise ProjectAlreadyLoaded("Project is already active")

        contract_sources = _load_sources(self._path, "contracts", False)
        interface_sources = _load_sources(self._path, "interfaces", True)
        self._sources = Sources(contract_sources, interface_sources)
        self._build = Build(self._sources)

        contract_list = self._sources.get_contract_list()
        for path in list(self._path.glob("build/contracts/*.json")):
            try:
                with path.open() as fp:
                    build_json = json.load(fp)
            except json.JSONDecodeError:
                build_json = {}
            if not set(BUILD_KEYS).issubset(
                    build_json) or path.stem not in contract_list:
                path.unlink()
                continue
            if isinstance(build_json["allSourcePaths"], list):
                # this handles the format change in v1.7.0; it can be removed in a future release
                path.unlink()
                test_path = self._path.joinpath("build/tests.json")
                if test_path.exists():
                    test_path.unlink()
                continue
            self._build._add(build_json)

        self._compiler_config = _load_project_compiler_config(self._path)

        # compile updated sources, update build
        changed = self._get_changed_contracts()
        self._compile(changed, self._compiler_config, False)
        self._save_interface_hashes()
        self._create_containers()
        self._load_deployments()

        # add project to namespaces, apply import blackmagic
        name = self._name
        self.__all__ = list(self._containers)
        sys.modules[f"brownie.project.{name}"] = self  # type: ignore
        sys.modules["brownie.project"].__dict__[name] = self
        sys.modules["brownie.project"].__all__.append(name)  # type: ignore
        sys.modules["brownie.project"].__console_dir__.append(
            name)  # type: ignore
        self._namespaces = [
            sys.modules["__main__"].__dict__,
            sys.modules["brownie.project"].__dict__,
        ]
        self._active = True
        _loaded_projects.append(self)

    def _get_changed_contracts(self) -> Dict:
        # get list of changed interfaces and contracts
        old_hashes = self._load_interface_hashes()
        new_hashes = self._sources.get_interface_hashes()
        interfaces = [
            k for k, v in new_hashes.items() if old_hashes.get(k, None) != v
        ]
        contracts = [
            i for i in self._sources.get_contract_list()
            if self._compare_build_json(i)
        ]

        # get dependents of changed sources
        final = set(contracts + interfaces)
        for contract_name in list(final):
            final.update(self._build.get_dependents(contract_name))

        # remove outdated build artifacts
        for name in [i for i in final if self._build.contains(i)]:
            self._build._remove(name)

        # get final list of changed source paths
        final.difference_update(interfaces)
        changed_set: Set = set(self._sources.get_source_path(i) for i in final)
        return {i: self._sources.get(i) for i in changed_set}

    def _load_interface_hashes(self) -> Dict:
        try:
            with self._path.joinpath("build/interfaces.json").open() as fp:
                return json.load(fp)
        except (FileNotFoundError, json.JSONDecodeError):
            return {}

    def _save_interface_hashes(self) -> None:
        interface_hashes = self._sources.get_interface_hashes()
        with self._path.joinpath("build/interfaces.json").open("w") as fp:
            json.dump(interface_hashes, fp, sort_keys=True, indent=2)

    def _compare_build_json(self, contract_name: str) -> bool:
        config = self._compiler_config
        # confirm that this contract was previously compiled
        try:
            source = self._sources.get(contract_name)
            build_json = self._build.get(contract_name)
        except KeyError:
            return True
        # compare source hashes
        if build_json["sha1"] != sha1(source.encode()).hexdigest():
            return True
        # compare compiler settings
        if _compare_settings(config, build_json["compiler"]):
            return True
        if build_json["language"] == "Solidity":
            # compare solc-specific compiler settings
            solc_config = config["solc"].copy()
            solc_config["remappings"] = None
            if _compare_settings(solc_config, build_json["compiler"]):
                return True
            # compare solc pragma against compiled version
            if Version(build_json["compiler"]
                       ["version"]) not in get_pragma_spec(source):
                return True
        return False

    def _load_deployments(self) -> None:
        if CONFIG.network_type != "live":
            return
        chainid = CONFIG.active_network["chainid"]
        path = self._path.joinpath(f"build/deployments/{chainid}")
        path.mkdir(exist_ok=True)
        deployments = list(path.glob("*.json"))
        deployments.sort(key=lambda k: k.stat().st_mtime)
        for build_json in deployments:
            with build_json.open() as fp:
                build = json.load(fp)
            if build["contractName"] not in self._containers:
                build_json.unlink()
                continue
            if "pcMap" in build:
                contract = ProjectContract(self, build, build_json.stem)
            else:
                contract = Contract(  # type: ignore
                    build["contractName"], build_json.stem, build["abi"])
                contract._project = self
            container = self._containers[build["contractName"]]
            _add_contract(contract)
            container._contracts.append(contract)

    def _update_and_register(self, dict_: Any) -> None:
        dict_.update(self)
        self._namespaces.append(dict_)

    def _add_to_main_namespace(self) -> None:
        # temporarily adds project objects to the main namespace
        brownie: Any = sys.modules["brownie"]
        brownie.__dict__.update(self._containers)
        brownie.__all__.extend(self.__all__)

    def _remove_from_main_namespace(self) -> None:
        # removes project objects from the main namespace
        brownie: Any = sys.modules["brownie"]
        for key in self._containers:
            brownie.__dict__.pop(key, None)
        for key in self.__all__:
            if key in brownie.__all__:
                brownie.__all__.remove(key)

    def __repr__(self) -> str:
        return f"<Project '{self._name}'>"

    def load_config(self) -> None:
        """Loads the project config file settings"""
        if isinstance(self._path, Path):
            _load_project_config(self._path)

    def close(self, raises: bool = True) -> None:
        """Removes pointers to the project's ContractContainer objects and this object."""
        if not self._active:
            if not raises:
                return
            raise ProjectNotFound("Project is not currently loaded.")

        # remove objects from namespace
        for dict_ in self._namespaces:
            for key in [
                k for k, v in dict_.items()
                if v == self or (k in self and v == self[k])  # type: ignore
            ]:
                del dict_[key]

        # remove contracts
        for contract in [
                x for v in self._containers.values() for x in v._contracts
        ]:
            _remove_contract(contract)
        for container in self._containers.values():
            container._contracts.clear()
        self._containers.clear()

        # undo black-magic
        self._remove_from_main_namespace()
        name = self._name
        del sys.modules[f"brownie.project.{name}"]
        sys.modules["brownie.project"].__all__.remove(name)  # type: ignore
        sys.modules["brownie.project"].__console_dir__.remove(
            name)  # type: ignore
        self._active = False
        _loaded_projects.remove(self)

        # clear paths
        try:
            sys.path.remove(str(self._path))
        except ValueError:
            pass
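
In this older variant the interface hashes round-trip through a single build/interfaces.json file rather than per-interface artifacts; a hedged sketch of its contents as written by _save_interface_hashes() (the interface name and hash are illustrative placeholders):

# flat mapping of interface name -> sha1 hash of its source, as dumped to
# build/interfaces.json and read back by _load_interface_hashes()
interface_hashes = {
    "IERC20": "<sha1 hex digest of the IERC20 source>",
}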
Example #7
    def __init__(self, project_path, name):
        self._project_path = project_path
        self._name = name
        self._sources = Sources(project_path)
        self._build = Build(project_path, self._sources)
Example #8
    def __init__(self, project_path: Optional["Path"], name: str) -> None:
        self._project_path = project_path
        self._name = name
        self._sources = Sources(project_path)
        self._build = Build(project_path, self._sources)