def container_kill_request(request, course, student):
    """
    Django view: kill the running container of one student for one course.

    Returns a 403 when the request is not authorized, an error page when
    the course is unknown or the kill fails, and the 'container-killed'
    page on success.
    """
    # xxx the names in urls.py need a cleanup
    coursename = course
    logger.info(f"container_kill_request: {coursename} x {student}")
    all_right, explanation = authorized(request)
    if not all_right:
        return HttpResponseForbidden(
            f"Access denied: {explanation}")
    coursedir = CourseDir.objects.get(coursename=coursename)
    if not coursedir.is_valid():
        return error_page(
            request, coursename, student, "n/a",
            f"no such coursename {coursename}"
        )
    # delegate the actual kill to the CourseDir model
    success = coursedir.kill_student_container(student)
    if not success:
        return error_page(
            request, coursename, student, "n/a",
            "could not kill container"
        )
    return render(request, "container-killed.html",
                  dict(coursename=coursename, student=student))
def handle(self, *args, **kwargs):
    """
    Management command entry point: pull from git every course
    whose name matches one of the given patterns.
    """
    for coursedir in sorted(CourseDir.courses_by_patterns(kwargs['patterns'])):
        logger.info(f"{40*'='} updating from git for {coursedir.coursename}")
        coursedir.pull_from_git()
def show_and_run(command, *, dry_run=False):
    """
    Log then run `command` through the shell.

    Returns True when the command exits with status 0; in dry-run mode
    the command is only logged and False is returned.
    """
    if dry_run:
        logger.info(f"(DRY-RUN) # {command}")
        return False
    logger.info(f"# {command}")
    return subprocess.run(command, shell=True).returncode == 0
def handle(self, *args, **kwargs):
    """
    Management command entry point: rebuild the image of every course
    whose name matches one of the given patterns.
    """
    dry_run, force = kwargs['dry_run'], kwargs['force']
    for coursedir in sorted(CourseDir.courses_by_patterns(kwargs['patterns'])):
        logger.info(f"{40*'='} building image for {coursedir.coursename}")
        coursedir.build_image(force, dry_run)
def from_uploaded(coursedir, droparea, inmemory: "InMemoryUploadedFile"):
    """
    Save an uploaded file in the course area.

    Creates the target directory if needed, copies the upload there by
    chunks, and returns the resulting Dropped instance.
    """
    import shutil
    logger.info(f"Uploaded {inmemory.name}")
    # save it in the course area
    dropped = Dropped(coursedir, droparea, inmemory.name, inmemory.size)
    dropped.mkdir()
    # copy by chunks instead of loading the whole upload into memory
    with inmemory.open() as reader, open(dropped.fullpath, 'wb') as writer:
        shutil.copyfileobj(reader, writer)
    return dropped
def locate_notebook(directory, notebook):
    """
    with jupytext in the picture, and with our needs to be able to publish
    raw .md files, there is a need to be a little smarter and to locate an
    actual contents from the 'notebook' path

    returns a 4-uple
        exists, notebook_with_extension, notebook_without_extension, is_notebook

    if exists is False, then all the rest is None;
    otherwise directory/notebook_with_extension is an existing file

    policy according to notebook's suffix:
    * if notebook ends up with .md it is assumed to be existing
    * if notebook does not end with anything, search for .py then .ipynb then .md
    * if notebook ends in .ipynb, search for .ipynb then .py
    * if notebook ends in .py, search for .py and then .ipynb

    xxx possibly this could be made simpler, especially with notebooks that
    only save themselves under a single format - as dual-format is a real
    pain in terms of updating a student's space; most likely it should use
    sitesettings.notebook_extensions but it's safer to keep it that way
    until the bulk of 2019/2020 courses is not over
    """
    logger.info(f"locate_notebook with {directory} and {notebook}")
    # which extensions to try, in order, for each incoming suffix
    policies = {
        '.md': ['.md'],
        '': ['.py', '.ipynb', '.md'],
        '.ipynb': ['.ipynb', '.py'],
        '.py': ['.py', '.ipynb'],
    }
    top = Path(directory)
    p = top / notebook
    suffix = p.suffix
    if suffix not in policies:
        return False, None, None, None
    # the extension-less path is loop-invariant - compute it once
    stem_path = p.parent / p.stem
    for variant in policies[suffix]:
        candidate = p.parent / (p.stem + variant)
        if candidate.exists():
            return (
                True,
                str(candidate.relative_to(top)),
                str(stem_path.relative_to(top)),
                # with jupytext in the picture,
                # any .md file can be opened as a notebook
                True)
    return False, None, None, None
def handle(self, *args, **kwargs):
    """
    Management command entry point: build the named core images,
    defaulting to every nbhosting*.Dockerfile found under NBHROOT/images.
    """
    dry_run, force = kwargs['dry_run'], kwargs['force']
    names = kwargs['name']
    if not names:
        names = [str(path.name)
                 for path in (NBHROOT / "images").glob("nbhosting*.Dockerfile")]
        logger.info(f"{10*'='} found default names {names}")
    for name in names:
        self.build_core_image(name, dry_run, force)
def _fetch_course_custom_tracks(self):
    """
    locate and load <course>/nbhosting/tracks.py

    objective is to make this customizable so that some
    notebooks in the repo can be ignored
    and the others organized along different view points

    the tracks() function will receive self as its single parameter
    it is expected to return a dictionary
       track_name -> Track instance
    see flotpython/nbhosting/tracks.py for a realistic example

    the keys in this dictionary are used in the web interface
    to propose the list of available tracks

    absence of tracks.py, or inability to run it, triggers
    the default policy (per directory) implemented in model_track.py
    """
    course_tracks_py = self.customized("tracks.py")
    if course_tracks_py:
        # derive a valid python module name from the course name
        modulename = (f"{self.coursename}_tracks"
                      .replace("-", "_"))
        try:
            logger.debug(f"{self} loading module {course_tracks_py}")
            # import the course-provided tracks.py by file path
            spec = spec_from_file_location(
                modulename,
                course_tracks_py,
            )
            module = module_from_spec(spec)
            spec.loader.exec_module(module)
            tracks_fun = module.tracks
            logger.debug(f"triggerring {tracks_fun.__qualname__}()")
            tracks = tracks_fun(self)
            # only accept the custom result if it passes validation
            if self._check_tracks(tracks):
                return tracks
        except Exception:
            # any failure in the hook falls back to the generic track below
            logger.exception(f"{self} could not do load custom tracks")
        finally:
            # make sure to reload the python code next time
            # we will need it, in case the course has published an update
            if modulename in sys.modules:
                del sys.modules[modulename]
    else:
        logger.info(f"{self} no tracks.py hook found")
    logger.warning(f"{self} resorting to generic filesystem-based track")
    return [generic_track(self)]
def handle(self, *args, **kwargs):
    """
    Management command entry point: pull from git for the named courses,
    or for all courses when --all is given.
    """
    coursenames = kwargs['coursenames']
    if not coursenames:
        if kwargs['all']:
            coursenames = sorted(
                (cd.coursename for cd in CourseDir.objects.all()))
        else:
            print("must provide at least one course, or --all")
            exit(1)
    for coursename in coursenames:
        coursedir = CourseDir.objects.get(coursename=coursename)
        if not coursedir.is_valid():
            logger.error(f"no such course {coursename}")
            # keep processing the remaining courses instead of
            # aborting the whole run (consistent with the autopull command)
            continue
        logger.info(f"{40*'='} pulling from git for {coursename}")
        coursedir.pull_from_git()
def handle(self, *args, **kwargs):
    """
    Management command entry point: rebuild the image for the named
    courses, or for all courses when --all is given.
    """
    dry_run = kwargs['dry_run']
    force = kwargs['force']
    coursenames = kwargs['coursenames']
    if not coursenames:
        if kwargs['all']:
            coursenames = sorted(
                (cd.coursename for cd in CourseDir.objects.all()))
        else:
            print("must provide at least one course, or --all")
            exit(1)
    for coursename in coursenames:
        coursedir = CourseDir.objects.get(coursename=coursename)
        if not coursedir.is_valid():
            logger.error(f"no such course {coursename}")
            # keep processing the remaining courses instead of
            # aborting the whole run (consistent with the autopull command)
            continue
        logger.info(f"{40*'='} building image for {coursename}")
        coursedir.build_image(force, dry_run)
def handle(self, *args, **kwargs):
    """
    Management command entry point: for each selected course that has
    opted in, pull from git, and rebuild when autobuild is set too.
    """
    coursenames = kwargs['coursenames']
    if not coursenames:
        if kwargs['all']:
            coursenames = sorted(
                (cd.coursename for cd in CourseDir.objects.all()))
        else:
            print("must provide at least one course, or --all")
            exit(1)
    for coursename in coursenames:
        coursedir = CourseDir.objects.get(coursename=coursename)
        if not coursedir.is_valid():
            logger.error(f"no such course {coursename}")
            continue
        # respect the per-course autopull opt-in flag
        if not coursedir.autopull:
            logger.info(f"course {coursename} has not opted for autopull")
            continue
        logger.info(f"{40*'='} pulling from git with course {coursename}")
        coursedir.pull_from_git()
        # xxx we may need some exclusion mechanism to avoid
        # having 2 builds running simultaneously
        if coursedir.autobuild:
            logger.info(f"autobuild: rebuilding {coursedir.coursename}")
            coursedir.run_builds('*')
def build_core_image(self, name, dry_run, force):
    """
    Rebuild one core image from its Dockerfile under NBHROOT/images.

    `name` may be given with or without the .Dockerfile suffix; it is
    matched against *{name}*Dockerfile and must match exactly one file.
    With `force`, podman is run with --no-cache; with `dry_run`, the
    shell commands are only displayed.
    """
    logger.info(f"{10*'='} rebuilding core image {name}")
    images_dir = NBHROOT / "images"
    # trim if needed
    name = name.replace(".Dockerfile", "")
    # search candidates
    candidates = list(images_dir.glob(f"*{name}*Dockerfile"))
    if len(candidates) != 1:
        logger.error(f"Found {len(candidates)} matches for {name} - skipped")
        return
    dockerfile = candidates[0]
    # helper scripts that must be shipped into the build context
    # maybe better use git ls-files *.sh ?
    scripts = [
        "nbhosting-image-root.sh",
        "nbhosting-image-jovyan.sh",
        "start-in-dir-as-uid.sh",
    ]
    # e.g. nbhosting-foo.Dockerfile -> image tag nbhosting/foo
    plain_name = str(dockerfile.name).replace(".Dockerfile", "")
    image_name = plain_name.replace("nbhosting-", "nbhosting/")
    # throwaway build context under /tmp
    work_dir = f"/tmp/core-{plain_name}"
    show_and_run(f"rm -rf {work_dir}; mkdir -p {work_dir}", dry_run=dry_run)
    show_and_run(f"cp {dockerfile} {work_dir}", dry_run=dry_run)
    for script in scripts:
        path = images_dir / script
        if not path.exists():
            logger.error(f"Could not spot script {script} for {name} - skipped")
            return
        show_and_run(f"cp {path} {work_dir}", dry_run=dry_run)
    force_tag = "" if not force else "--no-cache"
    build_command = f"cd {work_dir}; "
    build_command += f"podman build {force_tag}"
    build_command += f" -f {dockerfile.name} -t {image_name} ."
    show_and_run(build_command, dry_run=dry_run)
def log_completed_process(completed, subcommand):
    """
    Log the outcome of a subprocess.CompletedProcess: return code,
    stdout, and - subject to the site policy below - stderr.
    """
    header = f"{10 * '='} {subcommand}"
    logger.info(f"{header} returned ==> {completed.returncode}")
    for field in ('stdout', 'stderr'):
        text = getattr(completed, field, 'undef')
        # nothing to show
        if not text:
            continue
        # implement policy for stderr
        if field == 'stderr':
            # config has requested to not log stderr at all
            if sitesettings.DEBUG_log_subprocess_stderr is None:
                continue
            # config requires stderr only for failed subprocesses
            if sitesettings.DEBUG_log_subprocess_stderr is False \
               and completed.returncode == 0:
                continue
        logger.info(f"{header} - {field}")
        logger.info(text)
def show_and_run(command, *, dry_run=False):
    """
    Log then run `command` through the shell (os.system).

    Returns True when the command exits with status 0, False otherwise;
    in dry-run mode the command is only logged and False is returned.
    This matches the return convention of the subprocess-based variant
    of this helper elsewhere in the project.
    """
    if dry_run:
        logger.info(f"(DRY-RUN) # {command}")
        return False
    logger.info(f"# {command}")
    # os.system returns the raw wait status; 0 means success
    return os.system(command) == 0
def _jupyterdir_forward(request, coursename, student, jupyter_url):
    """
    Spawn (or reuse) the student's container for this course through the
    'nbh container-view-student-course-jupyterdir' subcommand, then
    redirect the browser to the jupyter URL (tree / lab / git-pull)
    served behind nginx on the container's port.
    """
    logger.info(f"jupyterdir_forward: jupyter_url={jupyter_url}")
    logger.info(f"jupyterdir_forward: GET={request.GET}")
    all_right, explanation = authorized(request)
    if not all_right:
        return HttpResponseForbidden(
            f"Access denied: {explanation}")
    # minimal filtering
    allowed_verbs = (
        'tree',      # classic notebook
        'lab',       # jupyterlab
        'git-pull',  # nbgitpuller
    )
    if not any(jupyter_url.startswith(verb) for verb in allowed_verbs):
        return HttpResponseForbidden(
            f"Access denied: verb not in {allowed_verbs} with {jupyter_url}")
    coursedir = CourseDir.objects.get(coursename=coursename)
    if not coursedir.is_valid():
        return error_page(
            request, coursename, student, "n/a",
            f"no such coursename {coursename}"
        )
    # nbh's subcommand
    subcommand = 'container-view-student-course-jupyterdir'
    # build command
    command = ['nbh', '-d', sitesettings.nbhroot]
    if DEBUG:
        command.append('-x')
    command.append(subcommand)
    # add arguments to the subcommand
    command += [student, coursename, coursedir.image]
    command_str = " ".join(command)
    logger.info(f"Running command {command_str}")
    completed = subprocess.run(
        command, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    log_completed_process(completed, subcommand)
    if completed.returncode != 0:
        message = failed_command_message(command_str, completed)
        return error_page(
            request, coursename, student, "jupyterdir", message)
    try:
        # the subcommand prints: action container-name port token
        action, _container_name, actual_port, jupyter_token = completed.stdout.split()
        if completed.returncode != 0 or action.startswith("failed"):
            message = failed_command_message(
                command_str, completed,
                prefix="failed to spawn notebook container")
            header = failed_command_header(action)
            return error_page(
                request, coursename, student, "n/a", message, header)
        # remember that in events file for statistics
        # not yet implemented on the Stats side
        # Stats(coursename).record_open_notebook(student, notebook, action, actual_port)
        # redirect with same proto (http or https) as incoming
        scheme = request.scheme
        # get the host part of the incoming URL
        host = request.get_host()
        # remove initial port if present in URL
        if ':' in host:
            host, _ = host.split(':', 1)
        ########## forge a URL that nginx will intercept
        # port depends on scheme - we do not specify it
        # passing along course and student is for 'reset_from_origin'
        url = (f"{scheme}://{host}/{actual_port}/{jupyter_url}"
               f"?token={jupyter_token}"
               f"&course={coursename}&student={student}")
        # propagate any extra GET parameters untouched
        for k, v in request.GET.items():
            url += f"&{k}={v}"
        logger.info(f"jupyterdir_forward: redirecting to {url}")
        return HttpResponseRedirect(url)
    except Exception as exc:
        prefix = (f"exception when parsing output of nbh {subcommand}\n"
                  f"{type(exc)}: {exc}")
        message = failed_command_message(command_str, completed, prefix=prefix)
        return error_page(
            request, coursename, student, "jupyterdir", message)
def share_notebook(request, course, student, notebook):
    """
    the URL to create static snapshots; it is intended to be fetched through ajax

    * computes a hash for storing the output
    * runs nbconvert in the student's container
    * stores the result in /nbhosting/snapshots/<course>/<hash>.html
    * returns a JSON-encoded dict that is either
      * { url: "/snapshots/flotpython/5465789765789.html" }
      * or { error: "the error message" }
    """
    coursedir = CourseDir.objects.get(coursename=course)
    student_dir = coursedir.student_dir(student)
    exists, with_ext, without_ext, is_notebook = locate_notebook(student_dir, notebook)
    if not exists or not is_notebook:
        message = f"cannot spot notebook {notebook}"
        return JsonResponse(dict(error=message))
    # the extension actually found on disk (e.g. '.py', '.md', '.ipynb')
    ext = with_ext.replace(without_ext, "")
    # debug = f"{with_ext=} {without_ext=} {ext=}"
    # logger.info(debug)
    # nbconvert works only from a .ipynb
    # invoke jupytext to produce that one
    if ext != '.ipynb':
        with_original_ext = with_ext
        with_ext = f"{without_ext}.ipynb"
        command_str = f"cd {student_dir}; jupytext --to ipynb -o {with_ext} {with_original_ext}"
        logger.info(f"In {Path.cwd()}\n"
                    f"-> Running command {command_str}")
        completed = subprocess.run(
            command_str, universal_newlines=True, shell=True,
            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        log_completed_process(completed, 'jupytext')
        if completed.returncode != 0:
            message = failed_command_message(command_str, completed)
            return JsonResponse(dict(error=message))
    # compute hash from the input, so that a second run on the same notebook
    # will override any previously published static snapshot
    hasher = hashlib.sha1(bytes(f'{course}-{student}-{without_ext}',
                                encoding='utf-8'))
    hash = hasher.hexdigest()
    subcommand = 'container-share-student-course-notebook-in-hash'
    command = ['nbh', '-d', sitesettings.nbhroot]
    if DEBUG:
        command.append('-x')
    command.append(subcommand)
    command += [student, course, with_ext, hash]
    command_str = " ".join(command)
    logger.info(f"In {Path.cwd()}\n"
                f"-> Running command {' '.join(command)}")
    completed = subprocess.run(
        command, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    log_completed_process(completed, subcommand)
    if completed.returncode != 0:
        message = failed_command_message(command_str, completed)
        return JsonResponse(dict(error=message))
    # expect the subcommand to write a url_path on its stdout
    url_path = completed.stdout.strip()
    logger.info(f"reading url_path={url_path}")
    # rebuild a full URL with proto and hostname,
    url = f"{request.scheme}://{request.get_host()}{url_path}"
    return JsonResponse(dict(url_path=url_path, url=url))
def _open_notebook(request, coursename, student, notebook,
                   *, forcecopy, init_student_git):
    # pylint: disable=r0914
    """
    implement both edx_request and classroom_request
    that behave almost exactly the same
    """
    ok, explanation = authorized(request)
    if not ok:
        return HttpResponseForbidden(
            f"Access denied: {explanation}")
    coursedir = CourseDir.objects.get(coursename=coursename)
    if not coursedir.is_valid():
        return error_page(
            request, coursename, student, notebook,
            f"no such course `{coursename}'", header=True,
        )
    # the ipynb extension is removed from the notebook name in urls.py
    exists, notebook_with_ext, _, is_genuine_notebook = \
        locate_notebook(coursedir.git_dir, notebook)
    # second attempt from the student's space
    # in case the student has created it locally...
    if not exists:
        exists, notebook_with_ext, _, is_genuine_notebook = \
            locate_notebook(coursedir.student_dir(student), notebook)
    if not exists:
        msg = f"notebook `{notebook}' not known in this course or student"
        return error_page(request, coursename, student, notebook,
                          msg, header="notebook not found")
    # deal with concurrent requests on the same container
    # by using a shared memory (a redis cache)
    # starting_containers is the cache name
    # as configured in nbhosting.ini(.in)
    # in devel mode we don't have redis
    redis_cache = None
    try:
        import redis
        idling = 0.5
        # just a safety in case our code would not release stuff properly
        expire_in_s = 15

        def my_repr(timedelta):
            return f"{timedelta.seconds}s {timedelta.microseconds}µs"

        redis_cache = redis.Redis()
        container = f'{coursename}-x-{student}'
        # busy-wait until no other request holds the per-container token;
        # NOTE(review): there is no attempt cap - this relies on the redis
        # TTL (expire_in_s) to eventually release a stale token
        for attempt in itertools.count(1):
            already = redis_cache.get(container)
            # good to go
            if not already:
                logger.info(f"{attempt=} going ahead with {container=} and {notebook=}")
                redis_cache.set(container, b'1')
                redis_cache.expire(container, expire_in_s)
                break
            # has the stored token expired ?
            logger.info(f"{attempt=} waiting for {idling=} because {container} is being started"
                        f"with {container=} and {notebook=}")
            time.sleep(idling)
    except ModuleNotFoundError:
        # make sure this error does not go unnoticed in production
        if not DEBUG:
            raise
        else:
            pass
    subcommand = 'container-view-student-course-notebook'
    # build command
    command = ['nbh', '-d', sitesettings.nbhroot]
    if DEBUG:
        command.append('-x')
    command.append(subcommand)
    # propagate the forcecopy flag for reset_from_origin
    if forcecopy:
        command.append('-f')
    # propagate that a git initialization was requested
    # forcecopy has no effect in this case
    if init_student_git:
        command.append('-g')
        # a student repo gets cloned from local course git
        # for lower delays when updating, and removing issues
        # like accessing private repos from the students space
        ref_giturl = str(coursedir.git_dir)
    else:
        ref_giturl = coursedir.giturl
    # add arguments to the subcommand
    command += [student, coursename, notebook_with_ext,
                coursedir.image, ref_giturl]
    command_str = " ".join(command)
    logger.info(f'edxfront is running (DEBUG={DEBUG}): {command_str}')
    completed = subprocess.run(
        command, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    log_completed_process(completed, subcommand)
    try:
        # the subcommand prints: action container-name port token
        action, _container_name, actual_port, jupyter_token = completed.stdout.split()
        if completed.returncode != 0 or action.startswith("failed"):
            message = failed_command_message(
                command_str, completed,
                prefix="failed to spawn notebook container")
            header = failed_command_header(action)
            return error_page(
                request, coursename, student, notebook, message, header)
        # remember that in events file for statistics
        Stats(coursename).record_open_notebook(student, notebook, action, actual_port)
        # redirect with same proto (http or https) as incoming
        scheme = request.scheme
        # get the host part of the incoming URL
        host = request.get_host()
        # remove initial port if present in URL
        if ':' in host:
            host, _ = host.split(':', 1)
        ########## forge a URL that nginx will intercept
        # passing along course and student is for 'reset_from_origin'
        if is_genuine_notebook:
            url = (f"{scheme}://{host}/{actual_port}/notebooks/"
                   f"{notebook_with_ext}?token={jupyter_token}&"
                   f"course={coursename}&student={student}")
        else:
            url = (f"{scheme}://{host}/{actual_port}/lab/tree/{notebook_with_ext}")
        logger.info(f"edxfront: redirecting to {url}")
        return HttpResponseRedirect(url)
    except Exception as exc:
        prefix = (f"exception when parsing output of nbh {subcommand}\n"
                  f"{type(exc)}: {exc}")
        message = failed_command_message(command_str, completed, prefix=prefix)
        return error_page(
            request, coursename, student, notebook, message)
    finally:
        # always release the redis token so other requests can proceed
        if redis_cache:
            redis_cache.delete(container)
def handle(self, *args, **kwargs):
    """
    Management command entry point: display the tracks of every course
    matching the patterns; unless --preserve is given, each course's
    tracks cache is cleared first; with --yaml, also dump each course's
    tracks into <coursename>.yaml.
    """
    preserve = kwargs['preserve']
    patterns = kwargs['patterns']
    selected = sorted(CourseDir.courses_by_patterns(patterns))
    yaml = kwargs['yaml']
    logger.info(f"preserve mode = {preserve}")
    for coursedir in selected:
        if not preserve:
            cache = (coursedir.notebooks_dir / ".tracks.json")
            if cache.exists():
                logger.info(f"clearing cache {cache}")
                cache.unlink()
        tracks = coursedir.tracks()
        for track in tracks:
            logger.info(
                f"track: {track.name}[id={track.id}]: {track.description}")
            logger.info(f" {track.describe()}")
            for section in track.sections:
                logger.info(f"{4*' '}section: {section.name}")
                for notebook in section.notebooks:
                    logger.info(f"{8*' '}* {notebook.notebookname}")
        if yaml:
            outname = f"{coursedir.coursename}.yaml"
            with open(outname, 'w') as out:
                out.write(to_yaml(tracks))
            print(f"overwrote {outname}")
    # success exit code for the management command
    return 0
def auditor_show_notebook(request, course, notebook=None, track=None, jupyter_app=None):
    """
    the auditor notebook view has 2 major modes;
    either a jupyter app is selected, or a track is selected
    in the latter case a notebook can be passed as well

    we should have either
    * jupyter_app set to 'classic' or 'jlab'
      in that case, track and notebook are totally discarded
    * otherwise if jupyter_app is not set,
      then we may have track set to a trackname
      and if not, we select the first track in the course

    in that case (no jupyter_app) a notebook arg can be passed
    it is then interpreted as a path from the git repo root
    """
    logger.info(f"auditor_show_notebook track={track} jupyter_app={jupyter_app} "
                f"notebook={notebook}")
    student = request.user.username
    # don't want to mess with the urls
    trackname = track
    coursedir = get_object_or_404(CourseDir, coursename=course)
    # do this in both jupyter and track mode
    if not trackname:
        trackname = coursedir.default_trackname()
    track = coursedir.track(trackname)
    if notebook is None:
        # no notebook given: open the track's first one
        notebook_obj = track.first_notebook()
        notebook = notebook_obj.clean_path()
        print(f"default notebook -> {notebook}")
    else:
        # may be None if the notebook is not part of the track
        notebook_obj = track.spot_notebook(notebook)
    # compute title as notebookname if found in sections
    if jupyter_app == 'classic':
        title = 'Jupyter classic'
        iframe = f'/ipythonForward/{course}/{student}/tree'
    elif jupyter_app == 'jlab':
        title = 'Jupyter lab'
        iframe = f'/ipythonForward/{course}/{student}/lab'
    else:
        title = notebook_obj.notebookname if notebook_obj else notebook
        iframe = f"/notebookGitRepo/{course}/{notebook}/{student}"
    # giturl = coursedir.giturl
    # gitpull_url = (f"/ipythonForward/{course}/{student}/git-pull"
    #                f"?repo={giturl}"
    #                f"&autoRedirect=false"
    #                f"&toplevel=."
    #                f"&redirectUrl={iframe}"
    #                )
    tracks = coursedir.tracks()
    # annotate each track with this student's progress
    for track_obj in tracks:
        track_obj.mark_notebooks(student)
    # NOTE(review): assumes every track has at least one notebook -
    # first_notebook() returning None would raise here; confirm upstream
    first_notebook_per_track = {
        track_obj.id: track_obj.first_notebook().clean_path()
        for track_obj in tracks
    }
    env = dict(
        nbh_version=nbh_version,
        coursename=course,
        student=student,
        coursedir=coursedir,
        track=track,
        jupyter_app=jupyter_app,
        notebook=notebook,
        iframe=iframe,
        # gitpull_url=gitpull_url,
        head_title=f"nbh:{course}",
        title=title,
        tracks=tracks,
        first_notebook_per_track=first_notebook_per_track,
    )
    return render(request, "auditor-notebook.html", env)
def handle(self, *args, **kwargs):
    """
    Management command entry point: display the tracks of one course;
    unless --preserve is given, the tracks cache is cleared first so
    the tracks get recomputed.
    """
    preserve = kwargs['preserve']
    coursename = kwargs['coursename']
    try:
        coursedir = CourseDir.objects.get(coursename=coursename)
    except CourseDir.DoesNotExist:
        logger.error(f"course not found {coursename}")
        return 1
    logger.info(f"preserve mode = {preserve}")
    cache = coursedir.notebooks_dir / ".tracks.json"
    if not preserve and cache.exists():
        logger.info(f"clearing cache {cache}")
        cache.unlink()
    for track in coursedir.tracks():
        logger.info(
            f"track: {track.name}[id={track.id}]: {track.description}")
        logger.info(f" {track.describe()}")
        for section in track.sections:
            logger.info(f"{4*' '}section: {section.name}")
            for notebook in section.notebooks:
                logger.info(f"{8*' '}* {notebook.notebookname}")
def run_build(self, build: Build, *,
              # pylint: disable=too-many-locals
              dry_run=False, force=False):
    """
    execute one of the builds provided in nbhosting.yaml

    * preparation: create a launcher script called .clone-build-rsync.sh
      in NBHROOT/builds/<coursename>/<buildid>/<githash>/
      this script contains the 'script' part defined in YAML
      surrounded with some pre- and post- code
    * start a podman container with the relevant areas bind-mounted
      namely the git repo - mounted read-only - and the build area
      mentioned above

    return True if build is done or redone successfully
    """
    coursename = self.coursename
    githash = self.current_hash()
    buildid = build.id

    build_path = Path(self.build_dir) / buildid / githash
    if build_path.exists():
        if not build_path.is_dir():
            logger.error(
                f"{build_path} exists and is not a dir - build aborted")
            return False
        if not force:
            logger.warning(
                f"build {build_path} already present - run with --force to override"
            )
            return False
        logger.info(f"removing existing build (--force) {build_path}")
        shutil.rmtree(str(build_path))

    # explicit dict instead of the former eval()-based variable capture
    vars_ = {
        'NBHROOT': NBHROOT,
        'coursename': coursename,
        'script': build.script,
        'directory': build.directory,
        'result_folder': build.result_folder,
    }
    template = get_template("scripts/dot-clone-build-rsync.sh")
    expanded_script = template.render(vars_)

    host_trigger = build_path / ".clone-build-rsync.sh"
    host_log = host_trigger.with_suffix(".log")
    host_trigger.parent.mkdir(parents=True, exist_ok=True)
    with host_trigger.open('w') as writer:
        writer.write(expanded_script)
    container = f"{coursename}-xbuildx-{buildid}-{githash}"
    podman_c = ""
    podman_c += " podman run --rm"
    podman_c += f" --name {container}"
    # mount git repo
    podman_c += f" -v {self.git_dir}:{self.git_dir}"
    # ditto under its normalized name if needed
    if self.norm_git_dir != self.git_dir:
        podman_c += f" -v {self.norm_git_dir}:{self.norm_git_dir}"
    # mount subdir of NBHROOT/builds
    podman_c += f" -v {host_trigger.parent}:/home/jovyan/building"
    podman_c += f" {self.image}"
    podman_c += " bash /home/jovyan/building/.clone-build-rsync.sh"
    podman_c += f" > {host_log} 2>&1"
    success = show_and_run(podman_c, dry_run=dry_run)
    if dry_run:
        logger.info(f"(DRY-RUN) Build script is in {host_trigger}")
    else:
        logger.info(f"See complete log in {host_log}")
        if success:
            # move latest symlink
            latest = Path(self.build_dir) / buildid / "latest"
            # is_symlink() also catches a dangling symlink, for which
            # exists() is False but symlink_to() would still fail
            if latest.exists() or latest.is_symlink():
                latest.unlink()
            latest.symlink_to(Path(githash), target_is_directory=True)
            logger.info(f"{latest} updated ")
    return success
def clear(path):
    """Recursively remove the directory tree at `path`."""
    # `verbose` is a free variable (module-level or enclosing scope) -
    # not defined in this block; TODO confirm where it is set
    if verbose:
        logger.info(f"Clearing {path}")
    shutil.rmtree(path)
def _open_notebook(request, coursename, student, notebook,
                   *, forcecopy, init_student_git):
    # pylint: disable=r0914
    # NOTE(review): this looks like an older sibling of the redis-guarded
    # _open_notebook elsewhere in this codebase - confirm which one is live
    """
    implement both edx_request and classroom_request
    that behave almost exactly the same
    """
    ok, explanation = authorized(request)
    if not ok:
        return HttpResponseForbidden(
            f"Access denied: {explanation}")
    coursedir = CourseDir.objects.get(coursename=coursename)
    if not coursedir.is_valid():
        return error_page(
            request, coursename, student, notebook,
            f"no such course `{coursename}'", header=True,
        )
    # the ipynb extension is removed from the notebook name in urls.py
    exists, notebook_with_ext, _, is_genuine_notebook = \
        locate_notebook(coursedir.git_dir, notebook)
    # second attempt from the student's space
    # in case the student has created it locally...
    if not exists:
        exists, notebook_with_ext, _, is_genuine_notebook = \
            locate_notebook(coursedir.student_dir(student), notebook)
    if not exists:
        msg = f"notebook `{notebook}' not known in this course or student"
        return error_page(request, coursename, student, notebook,
                          msg, header="notebook not found")
    subcommand = 'container-view-student-course-notebook'
    # build command
    command = ['nbh', '-d', sitesettings.nbhroot]
    if DEBUG:
        command.append('-x')
    command.append(subcommand)
    # propagate the forcecopy flag for reset_from_origin
    if forcecopy:
        command.append('-f')
    # propagate that a git initialization was requested
    # forcecopy has no effect in this case
    if init_student_git:
        command.append('-g')
        # a student repo gets cloned from local course git
        # for lower delays when updating, and removing issues
        # like accessing private repos from the students space
        ref_giturl = str(coursedir.git_dir)
    else:
        ref_giturl = coursedir.giturl
    # add arguments to the subcommand
    command += [student, coursename, notebook_with_ext,
                coursedir.image, ref_giturl]
    command_str = " ".join(command)
    logger.info(f'edxfront is running: {command_str} DEBUG={DEBUG}')
    completed = subprocess.run(
        command, universal_newlines=True,
        stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    log_completed_process(completed, subcommand)
    try:
        # the subcommand prints: action container-name port token
        action, _container_name, actual_port, jupyter_token = completed.stdout.split()
        if completed.returncode != 0 or action.startswith("failed"):
            message = failed_command_message(
                command_str, completed,
                prefix="failed to spawn notebook container")
            header = failed_command_header(action)
            return error_page(
                request, coursename, student, notebook, message, header)
        # remember that in events file for statistics
        Stats(coursename).record_open_notebook(student, notebook, action, actual_port)
        # redirect with same proto (http or https) as incoming
        scheme = request.scheme
        # get the host part of the incoming URL
        host = request.get_host()
        # remove initial port if present in URL
        if ':' in host:
            host, _ = host.split(':', 1)
        ########## forge a URL that nginx will intercept
        # passing along course and student is for 'reset_from_origin'
        if is_genuine_notebook:
            url = (f"{scheme}://{host}/{actual_port}/notebooks/"
                   f"{notebook_with_ext}?token={jupyter_token}&"
                   f"course={coursename}&student={student}")
        else:
            url = (f"{scheme}://{host}/{actual_port}/lab/tree/{notebook_with_ext}")
        logger.info(f"edxfront: redirecting to {url}")
        return HttpResponseRedirect(url)
    except Exception as exc:
        prefix = (f"exception when parsing output of nbh {subcommand}\n"
                  f"{type(exc)}: {exc}")
        message = failed_command_message(command_str, completed, prefix=prefix)
        return error_page(
            request, coursename, student, notebook, message)