Code Example #1
File: build_tools.py  Project: davnoe/kiji
def clone_maven_repo(target, source):
    """Clones a Maven local repository deeply.

    Clones the entire source Maven local repository into the specified target
    directory. The clone is entirely isolated from the source repository, i.e.
    new artifacts downloaded into the clone are not visible in the source
    repository.

    Args:
        target: Where to clone the source Maven repository.
        source: Source Maven repository to clone.
    """
    assert os.path.exists(source), ("Source Maven local repository %r does not exist." % source)

    assert not os.path.exists(target), ("Target Maven local repository %r already exists." % target)
    base.make_dir(target)
    assert os.path.exists(target), ("Could not create target directory %r" % target)

    for name in os.listdir(source):
        source_path = os.path.join(source, name)
        if os.path.isfile(source_path):
            os.symlink(src=source_path, dst=os.path.join(target, name))
        elif os.path.isdir(source_path):
            clone_maven_repo(target=os.path.join(target, name), source=source_path)
        else:
            logging.error("Unable to handle file: %s", source_path)
Code Example #2
File: build_tools.py  Project: davnoe/kiji
def backport_updates_from_cloned_repo(clone, source):
    """Back-port updates in a cloned Maven repository to its source.

    Contributes new artifacts from the cloned repository back to the source
    repository, in a way that is thread-safe.
    This operation destroys the cloned repository.

    Args:
        clone: Maven repository to back-port from.
        source: Maven repository source of the clone.
    """
    assert os.path.exists(clone), ("Cloned Maven local repository %r does not exist." % clone)

    base.make_dir(source)

    for name in os.listdir(clone):
        clone_path = os.path.join(clone, name)
        source_path = os.path.join(source, name)
        if os.path.islink(clone_path):
            # Skip all symlinks
            continue
        elif os.path.isfile(clone_path):
            # Copy file instead of move?
            logging.debug("Back-porting %s as %s", clone_path, source_path)
            os.rename(src=clone_path, dst=source_path)
        elif os.path.isdir(clone_path):
            backport_updates_from_cloned_repo(source=source_path, clone=clone_path)
        else:
            logging.error("Unable to handle file: %s", clone_path)
Code Example #3
    def Get(self, group, artifact, version, type, classifier=None):
        """Retrieves an artifact locally.

        Deprecated, this method is going away: use get(artifact) instead.

        If the artifact is a snapshot (version ends with '-SNAPSHOT'),
        all remotes are checked for a newer version.

        Args:
            group: Artifact group ID.
            artifact: Artifact ID.
            version: Artifact version.
            type: Artifact type, a.k.a. packaging ('jar', 'pom', etc.).
            classifier: Optional classifier (e.g. 'test' or 'release').
        Returns:
            The path of the artifact in the local repository.
            None if the artifact cannot be found.
        """
        coordinate = dict(
            group=group,
            artifact=artifact,
            version=version,
            type=type,
            classifier=classifier,
        )
        logging.log(LOG_LEVEL.DEBUG_VERBOSE,
                    "Getting artifact with coordinate %r", coordinate)
        path = self.local.GetPath(**coordinate)
        parsed = urllib.request.urlparse(self.local.get_url(**coordinate))
        assert (parsed.scheme == "file")
        local_path = parsed.path
        md5_path = "%s.md5" % local_path
        sha1_path = "%s.sha1" % local_path

        # Artifact is a snapshot, resolve it first if allowed:
        if version.endswith("-SNAPSHOT") and self._remote_snapshots:
            # Find the most recent snapshot version from all the remote repositories:

            def ScanRemotes():
                for remote in self.remotes:
                    resolved = remote.resolve(**coordinate)
                    if resolved is None: continue
                    # Allow for snapshots resolved in a local repository:
                    # if resolved["snapshot_version"] is None: continue
                    yield (resolved, remote)

            # Snapshots on remote repositories are expected to have a snapshot_version:
            best_remote = None
            best_version = dict(snapshot_version="", **coordinate)

            # A snapshot built locally into a local repository has no snapshot_version.
            # This lists the local repositories where unversioned snapshots are found:
            local_repos = list()

            for (resolved, remote) in ScanRemotes():
                if resolved["snapshot_version"] is None:
                    local_repos.append(remote)
                elif best_version["snapshot_version"] < resolved[
                        "snapshot_version"]:
                    best_remote = remote
                    best_version = resolved

            if (best_remote is None) and (len(local_repos) == 0):
                raise ArtifactNotFoundError(
                    "Artifact %s:%s:%s:%s:%s not found in remote repositories"
                    % (coordinate["group"], coordinate["artifact"],
                       coordinate["classifier"], coordinate["type"],
                       coordinate["version"]))
            elif len(local_repos) == 0:
                logging.debug("Artifact resolved to %s in remote %s",
                              best_version, best_remote)
            elif best_remote is None:
                assert (len(local_repos) == 1), \
                    ("Multiple snapshot local copies of %r" % coordinate)
                local_repo = local_repos[0]
                parsed = urllib.request.urlparse(
                    local_repo.get_url(**coordinate))
                assert (parsed.scheme == "file")
                local_path = parsed.path
                return local_path
            else:
                raise Error(
                    "Multiple snapshot copies in local repositories and remote repositories: %r"
                    % coordinate)

            (http_reply, md5, sha1) = best_remote.open(**best_version)
            try:
                # Do we have this snapshot artifact locally already:
                if (os.path.exists(local_path) and os.path.exists(md5_path)
                        and os.path.exists(sha1_path)
                        and md5 == self.local.read_md5_file(path)
                        and sha1 == self.local.read_sha1_file(path)
                        and (get_file_fingerprints(local_path)
                             == (self.local.read_md5_file(path),
                                 self.local.read_sha1_file(path)))):
                    logging.log(LOG_LEVEL.DEBUG_VERBOSE,
                                "Snapshot artifact found locally: %r",
                                local_path)
                    return local_path
            finally:
                http_reply.close()
            logging.log(LOG_LEVEL.DEBUG_VERBOSE,
                        "Snapshot artifact not found locally: %r", coordinate)
            remotes = (best_remote, )

        # Artifact is a snapshot but we do not allow remote snapshots:
        elif version.endswith("-SNAPSHOT") and not self._remote_snapshots:
            logging.log(LOG_LEVEL.DEBUG_VERBOSE,
                        "Restricting snapshot artifact to local FS: %r",
                        local_path)
            if os.path.exists(local_path):
                logging.log(LOG_LEVEL.DEBUG_VERBOSE,
                            "Local snapshot artifact found: %r", local_path)
                return local_path
            else:
                logging.debug("Local snapshot artifact not found: %r",
                              local_path)
                return None

        else:
            # Do we have this non-snapshot artifact locally already?
            if (os.path.exists(local_path) and
                (os.path.getsize(local_path) > 0)  # FIXME quick workaround
                    and os.path.exists(md5_path) and os.path.exists(sha1_path)
                    and (get_file_fingerprints(local_path)
                         == (self.local.read_md5_file(path),
                             self.local.read_sha1_file(path)))):
                # Yes, artifact found locally, with matching checksums:
                logging.log(
                    LOG_LEVEL.DEBUG_VERBOSE,
                    "Artifact found locally with matching checksums: %r",
                    local_path)
                return local_path
            else:
                # Look for the artifact in the configured remotes:
                logging.debug("Artifact not found locally: %r", local_path)
                remotes = self.remotes

        # Artifact does not exist locally.
        # Try each remote repository one after another,
        # pick the first that contains the artifact we are looking for:
        for remote in remotes:
            try:
                open_result = remote.open(**coordinate)
                if open_result is None:
                    continue

                (http_reply, md5, sha1) = open_result
                try:
                    base.make_dir(os.path.dirname(local_path))
                    (actual_md5, actual_sha1) = RemoteRepository.read_to_file(
                        http_reply=http_reply,
                        output_path=local_path,
                    )

                    if md5 is None:
                        logging.debug("No MD5 sum for %r from %s", local_path,
                                      remote.path)
                        md5 = actual_md5
                    elif md5 != actual_md5:
                        logging.error(
                            "MD5 mismatch for %r from %r: expected %r, got %r",
                            local_path, remote.path, md5, actual_md5)

                    if sha1 is None:
                        logging.debug("No SHA1 sum for %r from %s", local_path,
                                      remote.path)
                        sha1 = actual_sha1
                    elif sha1 != actual_sha1:
                        logging.error(
                            "SHA1 mismatch for %r from %r: expected %r, got %r",
                            local_path, remote.path, sha1, actual_sha1)

                    if (md5 == actual_md5) and (sha1 == actual_sha1):
                        logging.debug("Writing MD5 sum for %r", local_path)
                        with open(md5_path, "w") as f:
                            f.write(md5)
                        logging.debug("Writing SHA1 sum for %r", local_path)
                        with open(sha1_path, "w") as f:
                            f.write(sha1)
                        return local_path
                    else:
                        logging.warning("Checksum invalid for %r", local_path)
                        os.remove(local_path)
                finally:
                    http_reply.close()

            except urllib.error.HTTPError as err:
                logging.error("Error on remote %r: %r", remote,
                              err.read().decode())

            except urllib.error.URLError as err:
                if isinstance(err.reason, FileNotFoundError):
                    logging.debug("File not found: %r", err.reason.filename)
                else:
                    logging.error("Error on remote %r: %r", remote, err)

        # Artifact is nowhere to be found:
        return None
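A hedged call sketch (assumes a configured MavenRepository instance named repo; the coordinate is invented for illustration):

path = repo.Get(
    group="org.kiji.schema",  # hypothetical coordinate
    artifact="kiji-schema",
    version="1.0.0-SNAPSHOT",
    type="jar",
)
if path is None:
    print("Artifact not found locally or in any configured remote")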
Code Example #4
    def __init__(
        self,
        path,
        maven_repository=None,
    ):
        """Initializes a new workspace object.

        Args:
            path: Root path of the workspace.
            maven_repository: Optional explicit Maven repository where to search for artifacts.
        """
        self._path = os.path.abspath(path)
        assert os.path.exists(self.path), \
            "Workspace root directory does not exist: {!r}".format(self.path)

        assert os.path.isdir(self.config_dir), \
            "Workspace configuration directory missing: {!r}".format(self.config_dir)

        self._build_defs = build_defs.BuildDefs()

        # Process workspace configuration file:
        conf_file = os.path.join(self.config_dir, "conf.py")
        if os.path.exists(conf_file):
            logging.info("Loading configuration %r", conf_file)
            with open(conf_file, mode="rt", encoding="UTF-8") as f:
                conf_py = f.read()
            self._build_defs.eval(conf_py)

        # Process master BUILD file:
        build_file = os.path.join(self.path, "BUILD")
        if os.path.exists(build_file):
            logging.info("Loading build file %r", build_file)
            with open(build_file, mode="rt", encoding="UTF-8") as f:
                build_py = f.read()
            self._build_defs.eval(build_py)

        else:
            logging.info("BUILD file missing: %r", build_file)

        self._config = record.Record(self._build_defs.exec_locals)
        self.config.definitions = self._build_defs.definitions

        logging.debug("Using configuration: %r", self._config)

        # ------------------------------------------------------------------------------------------

        base.make_dir(self.output_dir)
        base.make_dir(self.temp_dir)
        base.make_dir(self.maven_local_repo)
        base.make_dir(os.path.join(self.temp_dir, "workflow"))
        logging.debug("Generating artifacts in: %s", self.maven_local_repo)

        # ------------------------------------------------------------------------------------------

        self._git = git_wrapper.Git(self.path)

        if maven_repository is None:
            remotes = self.config.get("maven_repositories", tuple())
            maven_repository = maven_repo.MavenRepository(
                local=self.maven_local_repo,
                remotes=remotes,
            )
            logging.debug("Using Maven repositories:\n%s",
                          base.add_margin("\n".join(remotes), "\t- "))
        self._maven_repo = maven_repository

        # Build toolkit:
        self._tools = build_tools.BuildTools(workspace=self)
        self.tools.validate()

        # Workflow generated from the build graph:
        self._workflow = None
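A minimal construction sketch (assuming this __init__ belongs to the Workspace class and "." is a workspace root containing the expected configuration directory):

workspace = Workspace(path=".")
# At this point conf.py and the master BUILD file (if present) have been
# evaluated, the output/temp/Maven directories exist, and the build tools
# have been validated.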
Code Example #5
File: fiji_build.py  Project: seomoz/fiji
    def run(self, args):
        # Generate a unique ID for this build.
        build_id = "build-%d" % base.now_ms()

        # Determine which workspace to build in:
        workspace = self.workspace
        if self.flags.clone_workspace:
            base.make_dir(FLAGS.build_workspace_dir)
            temp_workspace = os.path.join(tempfile.mkdtemp(dir=FLAGS.build_workspace_dir), build_id)
            workspace = self.workspace.Clone(temp_workspace)

        targets = []
        if (self.flags.targets is not None) and (len(self.flags.targets) > 0):
            targets.extend(self.flags.targets.split(","))
        targets.extend(args)
        targets = frozenset(targets)
        self._targets = targets

        http_monitor = None
        if (self.flags.http_monitor is not None) and (len(self.flags.http_monitor) > 0):
            (interface, port) = base.parse_host_port(self.flags.http_monitor)
            try:
                http_monitor = workflow.WorkflowHTTPMonitor(interface=interface, port=port)
                http_monitor.start()
            except OSError as os_error:
                # errno 48 (macOS/BSD) and 98 (Linux) are both EADDRINUSE:
                if (os_error.errno == 48) or (os_error.errno == 98):
                    raise Error(
                        "Address {!s}:{!s} already in use. Specify a different address "
                        "for the workflow http monitor with the --http-monitor flag."
                        .format(interface, port)
                    )
                else:
                    raise

        # Pass flags through the workspace config object.
        workspace.config.enable_incremental_testing = self.flags.enable_incremental_testing
        workspace.config.separate_test_logs = self.flags.separate_test_logs
        workspace.config.test_log_prefix = self.flags.test_log_prefix

        # Run the build:
        try:
            flow = workspace.make_workflow(force_build=self.flags.force_build)
            self._flow = flow

            self.adjust_workflow()
            flow = workspace.process(
                nworkers=self.flags.nworkers,
                http_monitor=http_monitor,
            )

            # TODO(DEV-429): refactor these flags, they are unwieldy
            if self.flags.output_dot is not None:
                with open(self.flags.output_dot, "wt", encoding="UTF-8") as dot_file:
                    dot_file.write(flow.dump_as_dot())

            if self.flags.output_svg is not None:
                with open(self.flags.output_svg, "wt", encoding="UTF-8") as svg_file:
                    svg_file.write(flow.dump_as_svg())

            if self.flags.output_table is not None:
                with open(self.flags.output_table, "wt", encoding="UTF-8") as table_file:
                    table_file.write(flow.dump_state_as_table())

            return len(flow.failed_tasks)  # 0 means success, non-zero means failure

        finally:
            if self.flags.clone_workspace and self.flags.delete_cloned_workspace:
                logging.info("Deleting cloned workspace %s", temp_workspace)
                shutil.rmtree(path=temp_workspace)
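One note on the except clause above: errno 48 is EADDRINUSE on macOS/BSD and errno 98 is EADDRINUSE on Linux. A portable variant of the same check, sketched with the stdlib errno module (workflow, Error, interface, and port are the names from the example above):

import errno

try:
    http_monitor = workflow.WorkflowHTTPMonitor(interface=interface, port=port)
    http_monitor.start()
except OSError as os_error:
    if os_error.errno == errno.EADDRINUSE:  # replaces the hard-coded (48, 98) pair
        raise Error("Address {!s}:{!s} already in use.".format(interface, port))
    raise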
Code Example #6
    def run(self, args):
        # Generate a unique ID for this build.
        build_id = "build-%d" % base.now_ms()

        # Determine which workspace to build in:
        workspace = self.workspace
        if self.flags.clone_workspace:
            base.make_dir(FLAGS.build_workspace_dir)
            temp_workspace = os.path.join(
                tempfile.mkdtemp(dir=FLAGS.build_workspace_dir), build_id)
            workspace = self.workspace.Clone(temp_workspace)

        targets = []
        if (self.flags.targets is not None) and (len(self.flags.targets) > 0):
            targets.extend(self.flags.targets.split(","))
        targets.extend(args)
        targets = frozenset(targets)
        self._targets = targets

        http_monitor = None
        if (self.flags.http_monitor
                is not None) and (len(self.flags.http_monitor) > 0):
            (interface, port) = base.parse_host_port(self.flags.http_monitor)
            try:
                http_monitor = workflow.WorkflowHTTPMonitor(
                    interface=interface, port=port)
                http_monitor.start()
            except OSError as os_error:
                # errno 48 (macOS/BSD) and 98 (Linux) are both EADDRINUSE:
                if (os_error.errno == 48) or (os_error.errno == 98):
                    raise Error(
                        "Address {!s}:{!s} already in use. Specify a different address "
                        "for the workflow http monitor with the --http-monitor flag."
                        .format(interface, port))
                else:
                    raise

        # Pass flags through the workspace config object.
        workspace.config.enable_incremental_testing = self.flags.enable_incremental_testing
        workspace.config.separate_test_logs = self.flags.separate_test_logs
        workspace.config.test_log_prefix = self.flags.test_log_prefix

        # Run the build:
        try:
            flow = workspace.make_workflow(force_build=self.flags.force_build)
            self._flow = flow

            self.adjust_workflow()
            flow = workspace.process(
                nworkers=self.flags.nworkers,
                http_monitor=http_monitor,
            )

            # TODO(DEV-429): refactor these flags, they are unwieldy
            if self.flags.output_dot is not None:
                with open(self.flags.output_dot, "wt",
                          encoding="UTF-8") as dot_file:
                    dot_file.write(flow.dump_as_dot())

            if self.flags.output_svg is not None:
                with open(self.flags.output_svg, "wt",
                          encoding="UTF-8") as svg_file:
                    svg_file.write(flow.dump_as_svg())

            if self.flags.output_table is not None:
                with open(self.flags.output_table, "wt",
                          encoding="UTF-8") as table_file:
                    table_file.write(flow.dump_state_as_table())

            return len(flow.failed_tasks)  # 0 means success, non-zero means failure

        finally:
            if self.flags.clone_workspace and self.flags.delete_cloned_workspace:
                logging.info("Deleting cloned workspace %s", temp_workspace)
                shutil.rmtree(path=temp_workspace)
Code Example #7
File: maven_repo.py  Project: yubobo/kiji
    def Get(self, group, artifact, version, type, classifier=None):
        """Retrieves an artifact locally.

        Deprecated, this method is going away: use get(artifact) instead.

        If the artifact is a snapshot (version ends with '-SNAPSHOT'),
        all remotes are checked for a newer version.

        Args:
            group: Artifact group ID.
            artifact: Artifact ID.
            version: Artifact version.
            type: Artifact type, a.k.a. packaging ('jar', 'pom', etc.).
            classifier: Optional classifier (e.g. 'test' or 'release').
        Returns:
            The path of the artifact in the local repository.
            None if the artifact cannot be found.
        """
        coordinate = dict(group=group, artifact=artifact, version=version, type=type, classifier=classifier)
        logging.log(LOG_LEVEL.DEBUG_VERBOSE, "Getting artifact with coordinate %r", coordinate)
        path = self.local.GetPath(**coordinate)
        parsed = urllib.request.urlparse(self.local.get_url(**coordinate))
        assert parsed.scheme == "file"
        local_path = parsed.path
        md5_path = "%s.md5" % local_path
        sha1_path = "%s.sha1" % local_path

        # Artifact is a snapshot, resolve it first if allowed:
        if version.endswith("-SNAPSHOT") and self._remote_snapshots:
            # Find the most recent snapshot version from all the remote repositories:

            def ScanRemotes():
                for remote in self.remotes:
                    resolved = remote.resolve(**coordinate)
                    if resolved is None:
                        continue
                    # Allow for snapshots resolved in a local repository:
                    # if resolved["snapshot_version"] is None: continue
                    yield (resolved, remote)

            # Snapshots on remote repositories are expected to have a snapshot_version:
            best_remote = None
            best_version = dict(snapshot_version="", **coordinate)

            # A snapshot built locally into a local repository has no snapshot_version.
            # This lists the local repositories where unversioned snapshots are found:
            local_repos = list()

            for (resolved, remote) in ScanRemotes():
                if resolved["snapshot_version"] is None:
                    local_repos.append(remote)
                elif best_version["snapshot_version"] < resolved["snapshot_version"]:
                    best_remote = remote
                    best_version = resolved

            if (best_remote is None) and (len(local_repos) == 0):
                raise ArtifactNotFoundError(
                    "Artifact %s:%s:%s:%s:%s not found in remote repositories"
                    % (
                        coordinate["group"],
                        coordinate["artifact"],
                        coordinate["classifier"],
                        coordinate["type"],
                        coordinate["version"],
                    )
                )
            elif len(local_repos) == 0:
                logging.debug("Artifact resolved to %s in remote %s", best_version, best_remote)
            elif best_remote is None:
                assert len(local_repos) == 1, "Multiple snapshot local copies of %r" % coordinate
                local_repo = local_repos[0]
                parsed = urllib.request.urlparse(local_repo.get_url(**coordinate))
                assert parsed.scheme == "file"
                local_path = parsed.path
                return local_path
            else:
                raise Error("Multiple snapshot copies in local repositories and remote repositories: %r" % coordinate)

            (http_reply, md5, sha1) = best_remote.open(**best_version)
            try:
                # Do we have this snapshot artifact locally already:
                if (
                    os.path.exists(local_path)
                    and os.path.exists(md5_path)
                    and os.path.exists(sha1_path)
                    and md5 == self.local.read_md5_file(path)
                    and sha1 == self.local.read_sha1_file(path)
                    and (
                        get_file_fingerprints(local_path)
                        == (self.local.read_md5_file(path), self.local.read_sha1_file(path))
                    )
                ):
                    logging.log(LOG_LEVEL.DEBUG_VERBOSE, "Snapshot artifact found locally: %r", local_path)
                    return local_path
            finally:
                http_reply.close()
            logging.log(LOG_LEVEL.DEBUG_VERBOSE, "Snapshot artifact not found locally: %r", coordinate)
            remotes = (best_remote,)

        # Artifact is a snapshot but we do not allow remote snapshots:
        elif version.endswith("-SNAPSHOT") and not self._remote_snapshots:
            logging.log(LOG_LEVEL.DEBUG_VERBOSE, "Restricting snapshot artifact to local FS: %r", local_path)
            if os.path.exists(local_path):
                logging.log(LOG_LEVEL.DEBUG_VERBOSE, "Local snapshot artifact found: %r", local_path)
                return local_path
            else:
                logging.debug("Local snapshot artifact not found: %r", local_path)
                return None

        else:
            # Do we have this non-snapshot artifact locally already?
            if (
                os.path.exists(local_path)
                and (os.path.getsize(local_path) > 0)  # FIXME quick workaround
                and os.path.exists(md5_path)
                and os.path.exists(sha1_path)
                and (
                    get_file_fingerprints(local_path)
                    == (self.local.read_md5_file(path), self.local.read_sha1_file(path))
                )
            ):
                # Yes, artifact found locally, with matching checksums:
                logging.log(LOG_LEVEL.DEBUG_VERBOSE, "Artifact found locally with matching checksums: %r", local_path)
                return local_path
            else:
                # Look for the artifact in the configured remotes:
                logging.debug("Artifact not found locally: %r", local_path)
                remotes = self.remotes

        # Artifact does not exist locally.
        # Try each remote repository one after another,
        # pick the first that contains the artifact we are looking for:
        for remote in remotes:
            try:
                open_result = remote.open(**coordinate)
                if open_result is None:
                    continue

                (http_reply, md5, sha1) = open_result
                try:
                    base.make_dir(os.path.dirname(local_path))
                    (actual_md5, actual_sha1) = RemoteRepository.read_to_file(
                        http_reply=http_reply, output_path=local_path
                    )

                    if md5 is None:
                        logging.debug("No MD5 sum for %r from %s", local_path, remote.path)
                        md5 = actual_md5
                    elif md5 != actual_md5:
                        logging.error(
                            "MD5 mismatch for %r from %r: expected %r, got %r", local_path, remote.path, md5, actual_md5
                        )

                    if sha1 is None:
                        logging.debug("No SHA1 sum for %r from %s", local_path, remote.path)
                        sha1 = actual_sha1
                    elif sha1 != actual_sha1:
                        logging.error(
                            "SHA1 mismatch for %r from %r: expected %r, got %r",
                            local_path,
                            remote.path,
                            sha1,
                            actual_sha1,
                        )

                    if (md5 == actual_md5) and (sha1 == actual_sha1):
                        logging.debug("Writing MD5 sum for %r", local_path)
                        with open(md5_path, "w") as f:
                            f.write(md5)
                        logging.debug("Writing SHA1 sum for %r", local_path)
                        with open(sha1_path, "w") as f:
                            f.write(sha1)
                        return local_path
                    else:
                        logging.warning("Checksum invalid for %r", local_path)
                        os.remove(local_path)
                finally:
                    http_reply.close()

            except urllib.error.HTTPError as err:
                logging.error("Error on remote %r: %r", remote, err.readall().decode())

            except urllib.error.URLError as err:
                if isinstance(err.reason, FileNotFoundError):
                    logging.debug("File not found: %r", err.reason.filename)
                else:
                    logging.error("Error on remote %r: %r", remote, err)

        # Artifact is nowhere to be found:
        return None
Code Example #8
File: workspace.py  Project: davnoe/kiji
    def __init__(
        self,
        path,
        maven_repository=None,
    ):
        """Initializes a new workspace object.

        Args:
            path: Root path of the workspace.
            maven_repository: Optional explicit Maven repository where to search for artifacts.
        """
        self._path = os.path.abspath(path)
        assert os.path.exists(self.path), \
            "Workspace root directory does not exist: {!r}".format(self.path)

        assert os.path.isdir(self.config_dir), \
            "Workspace configuration directory missing: {!r}".format(self.config_dir)

        self._build_defs = build_defs.BuildDefs()

        # Process workspace configuration file:
        conf_file = os.path.join(self.config_dir, "conf.py")
        if os.path.exists(conf_file):
            logging.info("Loading configuration %r", conf_file)
            with open(conf_file, mode="rt", encoding="UTF-8") as f:
                conf_py = f.read()
            self._build_defs.eval(conf_py)

        # Process master BUILD file:
        build_file = os.path.join(self.path, "BUILD")
        if os.path.exists(build_file):
            logging.info("Loading build file %r", build_file)
            with open(build_file, mode="rt", encoding="UTF-8") as f:
                build_py = f.read()
            self._build_defs.eval(build_py)

        else:
            logging.info("BUILD file missing: %r", build_file)

        self._config = record.Record(self._build_defs.exec_locals)
        self.config.definitions = self._build_defs.definitions

        logging.debug("Using configuration: %r", self._config)

        # ------------------------------------------------------------------------------------------

        base.make_dir(self.output_dir)
        base.make_dir(self.temp_dir)
        base.make_dir(self.maven_local_repo)
        base.make_dir(os.path.join(self.temp_dir, "workflow"))
        logging.debug("Generating artifacts in: %s", self.maven_local_repo)

        # ------------------------------------------------------------------------------------------

        self._git = git_wrapper.Git(self.path)

        if maven_repository is None:
            remotes = self.config.get("maven_repositories", tuple())
            maven_repository = maven_repo.MavenRepository(
                local=self.maven_local_repo,
                remotes=remotes,
            )
            logging.debug("Using Maven repositories:\n%s",
                          base.add_margin("\n".join(remotes), "\t- "))
        self._maven_repo = maven_repository

        # Build toolkit:
        self._tools = build_tools.BuildTools(workspace=self)
        self.tools.validate()

        # Workflow generated from the build graph:
        self._workflow = None