Example #1
    def build_image(self, force=False, dry_run=False):
        """
        locates Dockerfile and triggers podman build
        """
        force_tag = "--no-cache" if force else ""
        image_dir = self.image_dir

        image = self.image
        if image != self.coursename:
            logger.warning(f"cowardly refusing to rebuild image {image}"
                           f" from course {self.coursename}\n"
                           f"the 2 names should match")
            return

        dockerfile = self.customized("Dockerfile")
        if not dockerfile or not dockerfile.exists():
            logger.error(
                f"Could not spot Dockerfile for course {self.coursename}")
            return

        # clean up and repopulate build dir
        show_and_run(f"rm -rf {image_dir}/*", dry_run=dry_run)
        image_dir.mkdir(exist_ok=True)

        show_and_run(f"cp {dockerfile} {image_dir}/Dockerfile",
                     dry_run=dry_run)
        show_and_run(f"cp {NBHROOT}/images/start-in-dir-as-uid.sh {image_dir}",
                     dry_run=dry_run)
        show_and_run(
            f"cd {image_dir}; "
            f"podman build {force_tag} -f Dockerfile -t {image} .",
            dry_run=dry_run)
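
The show_and_run helper is used throughout these examples but never shown. Here is a minimal sketch, assuming it only logs the command, honours dry_run, and returns a success boolean; the signature is inferred from the call sites above, not taken from the actual nbhosting implementation.

import logging
import subprocess

logger = logging.getLogger(__name__)

def show_and_run(command, *, dry_run=False):
    # always show the command; in dry-run mode, stop right there
    logger.info(f"{'(DRY-RUN) ' if dry_run else ''}{command}")
    if dry_run:
        return True
    # a shell is required because callers rely on globbing, 'cd' and redirections
    completed = subprocess.run(command, shell=True)
    return completed.returncode == 0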
Example #2
    def handle(self, *args, **kwargs):

        coursename = kwargs['coursename']
        git_url = kwargs['git_url']
        image = kwargs['image']

        try:
            CourseDir.objects.get(coursename=coursename)
            logger.error(f"course {coursename} already exists")
            exit(1)
        except CourseDir.DoesNotExist:
            kwds = {}
            kwds['image'] = image if image else coursename
            created = CourseDir.objects.create(coursename=coursename,
                                               giturl=git_url,
                                               **kwds)
            ok = (created.run_nbh_subprocess('course-init', git_url)
                  and created.pull_from_git())
            if not ok:
                logger.error(f"Could not create course {coursename}")
                logger.warning(
                    f"Double-check (remove if exists) git repo {created.git_dir}"
                )
                created.delete()
            return 0
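
handle() reads three entries from kwargs; a plausible add_arguments() for the same management command is sketched below. The argument names are inferred from the kwargs used above; the help strings and the optional --image flag are assumptions.

    def add_arguments(self, parser):
        # the three kwargs consumed by handle(); names inferred from the code above
        parser.add_argument("coursename", help="name of the course to create")
        parser.add_argument("git_url", help="URL of the course git repository")
        # optional: handle() falls back to coursename when image is left empty
        parser.add_argument("--image", default="", help="podman image to use")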
Example #3
 def _locate_track(self, tracks: CourseTracks, trackname) -> Track:
     for item in tracks:
         if item.id == trackname:
             return item
     # find some default
     if tracks:
         logger.warning(
             f"{self} has no {trackname} track - returning first track")
         return tracks[0]
     logger.warning(f"{self} has no track, returning generic")
     return generic_track(self)
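
To make the lookup order explicit, here is a small standalone illustration; TrackStub is a stand-in defined only for this sketch, the real Track type is richer.

from dataclasses import dataclass

@dataclass
class TrackStub:
    id: str

tracks = [TrackStub("python-basics"), TrackStub("exercises")]
# _locate_track(tracks, "exercises")  -> TrackStub("exercises"), exact match on id
# _locate_track(tracks, "missing")    -> TrackStub("python-basics"), first track, with a warning
# _locate_track([], "missing")        -> generic_track(course), nothing else to offer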
Example #4
 def _read_embedded(self):
     try:
         nbo = jupytext.read(self.absolute())
         self._notebookname = (nbo['metadata'].get('notebookname',
                                                   self.clean_path()))
         self._version = (nbo['metadata'].get('version', '0.1'))
     except Exception as exc:
         logger.warning(
             f"failed to extract metadata for notebook {self.clean_path()}\n"
             f"because of exception {type(exc)}: {exc}")
         self._notebookname = self.clean_path()
         self._version = "n/a"
Example #5
 def _read_embedded(self):
     try:
         nbo = jupytext.read(self.absolute())
         metadata = nbo['metadata']
         nbh_md = metadata.get('nbhosting', {})
         self._notebookname = (nbh_md.get('title', "") or metadata.get(
             'notebookname', self.clean_path()))
         self._version = (nbh_md.get("version", "")
                          or metadata.get('version', '0.1'))
     except Exception as exc:  # pylint: disable=broad-except
         logger.warning(
             f"failed to extract metadata for notebook {self.clean_path()}\n"
             f"because of exception {type(exc)}: {exc}")
         self._notebookname = self.clean_path()
         self._version = "n/a"
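
Both _read_embedded variants parse notebook-level metadata as returned by jupytext.read(); the sketch below shows the expected layout as a plain dict, with the newer nbhosting section (Example #5) taking precedence over the legacy top-level keys (Example #4). The values are made up for illustration.

nbo = {
    'metadata': {
        'nbhosting': {
            'title': "an illustrative notebook title",
            'version': "1.0",
        },
        # legacy fallbacks, still honoured when the nbhosting section is absent
        'notebookname': "an illustrative notebook title",
        'version': "0.1",
    },
    'cells': [],
}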
Example #6
    def _fetch_course_custom_tracks(self):
        """
        locate and load <course>/nbhosting/tracks.py

        the objective is to make this customizable so that some
        notebooks in the repo can be ignored,
        and the others organized along different viewpoints

        the tracks() function will receive self as its single parameter;
        it is expected to return a list of Track instances
        see flotpython/nbhosting/tracks.py for a realistic example

        the ids of these tracks are used in the web interface
        to propose the list of available tracks

        absence of tracks.py, or inability to run it, triggers
        the default policy (per directory) implemented in model_track.py
        """

        course_tracks_py = self.customized("tracks.py")

        if course_tracks_py:
            modulename = (f"{self.coursename}_tracks".replace("-", "_"))
            try:
                logger.debug(f"{self} loading module {course_tracks_py}")
                spec = spec_from_file_location(
                    modulename,
                    course_tracks_py,
                )
                module = module_from_spec(spec)
                spec.loader.exec_module(module)
                tracks_fun = module.tracks
                logger.debug(f"triggering {tracks_fun.__qualname__}()")
                tracks = tracks_fun(self)
                if self._check_tracks(tracks):
                    return tracks
            except Exception:
                logger.exception(f"{self} could not load custom tracks")
            finally:
                # make sure to reload the python code next time
                # we will need it, in case the course has published an update
                if modulename in sys.modules:
                    del sys.modules[modulename]
        else:
            logger.info(f"{self} no tracks.py hook found")
        logger.warning(f"{self} resorting to generic filesystem-based track")
        return [generic_track(self)]
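
A hypothetical course-side nbhosting/tracks.py matching the contract described in the docstring; only that contract - expose a tracks(course) function returning a list of Track instances - comes from the code above, the import path and the reuse of generic_track are assumptions. See flotpython/nbhosting/tracks.py for a realistic example.

# hypothetical <course>/nbhosting/tracks.py
from nbhosting.courses.model_track import generic_track   # assumed import path

def tracks(course):
    # course is the CourseDir instance handed over by _fetch_course_custom_tracks;
    # a real hook would build tailored Track objects here (ignore some notebooks,
    # group the rest along several viewpoints); this sketch just reuses the
    # generic per-directory track
    return [generic_track(course)]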
Example #7
    def run_build(
            self,
            build: Build,
            *,  # pylint: disable=too-many-locals
            dry_run=False,
            force=False):
        """
        execute one of the builds provided in nbhosting.yaml

        * preparation: create a launcher script called .clone-build-rsync.sh
          in NBHROOT/builds/<coursename>/<buildid>/<githash>/
          this script contains the 'script' part defined in YAML
          surrounded with some pre- and post- code
        * start a podman container with the relevant areas bind-mounted
          namely the git repo - mounted read-only - and the build area
          mentioned above

        return True if build is done or redone successfully
        """

        coursename = self.coursename
        githash = self.current_hash()

        buildid = build.id
        script = build.script  # pylint: disable=unused-variable
        directory = build.directory  # pylint: disable=unused-variable
        result_folder = build.result_folder  # pylint: disable=unused-variable
        entry_point = build.entry_point  # pylint: disable=unused-variable

        build_path = Path(self.build_dir) / buildid / githash
        if build_path.exists():
            if not build_path.is_dir():
                logger.error(
                    f"{build_path} exists and is not a dir - build aborted")
                return False
            if not force:
                logger.warning(
                    f"build {build_path} already present - run with --force to override"
                )
                return False
            logger.info(f"removing existing build (--force) {build_path}")
            shutil.rmtree(str(build_path))

        variables = "NBHROOT+coursename+script+directory+result_folder"
        # oddly enough a dict comprehension won't work here,
        # saying the variable names are undefined...
        vars_ = {}
        for var in variables.split('+'):
            vars_[var] = eval(var)  # pylint: disable=eval-used

        template = get_template("scripts/dot-clone-build-rsync.sh")
        expanded_script = template.render(vars_)

        host_trigger = build_path / ".clone-build-rsync.sh"
        host_log = host_trigger.with_suffix(".log")
        host_trigger.parent.mkdir(parents=True, exist_ok=True)
        with host_trigger.open('w') as writer:
            writer.write(expanded_script)

        container = f"{coursename}-xbuildx-{buildid}-{githash}"

        podman_c = ""
        podman_c += " podman run --rm"
        podman_c += f" --name {container}"
        # mount git repo
        podman_c += f" -v {self.git_dir}:{self.git_dir}"
        # ditto under its normalized name if needed
        if self.norm_git_dir != self.git_dir:
            podman_c += f" -v {self.norm_git_dir}:{self.norm_git_dir}"
        # mount subdir of NBHROOT/builds
        podman_c += f" -v {host_trigger.parent}:/home/jovyan/building"
        podman_c += f" {self.image}"
        podman_c += " bash /home/jovyan/building/.clone-build-rsync.sh"
        podman_c += f" > {host_log} 2>&1"
        success = show_and_run(podman_c, dry_run=dry_run)
        if dry_run:
            logger.info(f"(DRY-RUN) Build script is in {host_trigger}")
        else:
            logger.info(f"See complete log in {host_log}")
            if success:
                # move latest symlink
                latest = Path(self.build_dir) / buildid / "latest"
                if latest.exists():
                    latest.unlink()
                latest.symlink_to(Path(githash), target_is_directory=True)
                logger.info(f"{latest} updated")
        return success
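
run_build() only reads five attributes from its build argument; below is a minimal sketch of the corresponding data holder, assuming a plain dataclass populated from the 'builds' section of nbhosting.yaml. The per-field comments are inferred from the names and the docstring, not from the actual class.

from dataclasses import dataclass

@dataclass
class Build:
    id: str             # names the build area and the podman container
    script: str         # shell fragment injected into .clone-build-rsync.sh
    directory: str      # where the build is run inside the repo
    result_folder: str  # what gets copied back out once the build is done
    entry_point: str    # fetched by run_build but not used in this excerpt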