def _daemon(cls, jupyter_notebook_filename):
    """Background sampler: watch a Jupyter notebook and mirror it into the current Task.

    Periodically (every ``cls._sample_frequency`` seconds, first after
    ``cls._first_sample_frequency``) converts the notebook to a Python script,
    and when the script changed, stores it as the Task's script diff together
    with pip/conda requirements derived from its imports (via pigar).

    :param cls: the monitor class carrying ``_exit_event``, ``_sync_event``,
        ``_sample_frequency`` and ``_first_sample_frequency``
    :param jupyter_notebook_filename: path to the notebook file to watch, or a
        falsy value to snapshot the live IPython notebook into a temp file
    :return: None. Exits when ``cls._exit_event`` is set, or immediately if
        nbconvert is unavailable.
    """
    from trains import Task

    # load jupyter notebook package; without the exporter we cannot convert
    # the notebook into python source, so there is nothing to monitor
    # noinspection PyBroadException
    try:
        from nbconvert.exporters.script import ScriptExporter
        _script_exporter = ScriptExporter()
    except Exception:
        return

    # load pigar (optional) - used to derive pip requirements from imports
    # noinspection PyBroadException
    try:
        from pigar.reqs import get_installed_pkgs_detail, file_import_modules
        from pigar.modules import ReqsModules
        from pigar.log import logger
        logger.setLevel(logging.WARNING)
    except Exception:
        # requirements analysis is skipped when pigar is missing
        file_import_modules = None

    # load IPython (optional) - needed to snapshot the live notebook
    # noinspection PyBroadException
    try:
        from IPython import get_ipython
    except Exception:
        # should not happen
        get_ipython = None

    # setup local notebook files
    if jupyter_notebook_filename:
        notebook = Path(jupyter_notebook_filename)
        local_jupyter_filename = jupyter_notebook_filename
    else:
        # no file on disk: serialize the live notebook into a temp file
        notebook = None
        fd, local_jupyter_filename = mkstemp(suffix='.ipynb')
        os.close(fd)

    last_update_ts = None
    counter = 0
    prev_script_hash = None

    # main observer loop, check if we need to exit
    while not cls._exit_event.wait(timeout=0.):
        # wait for timeout or sync event
        cls._sync_event.wait(cls._sample_frequency if counter else cls._first_sample_frequency)
        cls._sync_event.clear()
        counter += 1
        # noinspection PyBroadException
        try:
            # if there is no task connected, do nothing
            task = Task.current_task()
            if not task:
                continue

            # if we have a local file:
            if notebook:
                if not notebook.exists():
                    continue
                # check if notebook changed since the last sample
                if last_update_ts is not None and notebook.stat().st_mtime - last_update_ts <= 0:
                    continue
                last_update_ts = notebook.stat().st_mtime
            else:
                # serialize notebook to a temp file
                # noinspection PyBroadException
                try:
                    get_ipython().run_line_magic('notebook', local_jupyter_filename)
                except Exception:
                    continue

            # get notebook python script
            script_code, resources = _script_exporter.from_filename(local_jupyter_filename)
            current_script_hash = hash(script_code)
            # skip the expensive update when nothing changed
            if prev_script_hash and prev_script_hash == current_script_hash:
                continue

            requirements_txt = ''
            conda_requirements = ''
            # parse jupyter python script and prepare pip requirements (pigar)
            # if backend supports requirements
            if file_import_modules and Session.check_min_api_version('2.2'):
                # BUGFIX: `notebook` is None in the live-snapshot path, so
                # `notebook.parts[-1]` raised AttributeError (silently swallowed
                # below, disabling updates). Use the file actually exported;
                # in the file-backed case it is the same notebook filename.
                entry_name = Path(local_jupyter_filename).parts[-1]
                fmodules, _ = file_import_modules(entry_name, script_code)
                fmodules = ScriptRequirements.add_trains_used_packages(fmodules)
                installed_pkgs = get_installed_pkgs_detail()
                reqs = ReqsModules()
                for name in fmodules:
                    if name in installed_pkgs:
                        pkg_name, version = installed_pkgs[name]
                        reqs.add(pkg_name, version, fmodules[name])
                requirements_txt, conda_requirements = ScriptRequirements.create_requirements_txt(reqs)

            # update script
            prev_script_hash = current_script_hash
            data_script = task.data.script
            data_script.diff = script_code
            data_script.requirements = {'pip': requirements_txt, 'conda': conda_requirements}
            task._update_script(script=data_script)
            # update requirements
            task._update_requirements(requirements=requirements_txt)
        except Exception:
            # best-effort daemon: never let a sampling failure kill the loop
            pass
def _daemon(cls, jupyter_notebook_filename):
    """Background sampler: watch a Jupyter notebook file and mirror it into the current Task.

    Periodically (every ``cls._sample_frequency`` seconds, first after
    ``cls._first_sample_frequency``) converts the notebook file to a Python
    script, and when the script changed, stores it as the Task's script diff
    together with pip requirements derived from its imports (via pigar).

    :param cls: the monitor class carrying ``_exit_event``,
        ``_sample_frequency`` and ``_first_sample_frequency``
    :param jupyter_notebook_filename: path to the notebook file to watch
    :return: None. Exits when ``cls._exit_event`` is set, or immediately if
        nbconvert is unavailable.
    """
    from trains import Task

    # load jupyter notebook package; without the exporter we cannot convert
    # the notebook into python source, so there is nothing to monitor
    # noinspection PyBroadException
    try:
        from nbconvert.exporters.script import ScriptExporter
        _script_exporter = ScriptExporter()
    except Exception:
        return

    # load pigar (optional) - used to derive pip requirements from imports
    # noinspection PyBroadException
    try:
        from pigar.reqs import get_installed_pkgs_detail, file_import_modules
        from pigar.modules import ReqsModules
        from pigar.log import logger
        logger.setLevel(logging.WARNING)
    except Exception:
        # requirements analysis is skipped when pigar is missing
        file_import_modules = None

    # main observer loop
    notebook = Path(jupyter_notebook_filename)
    last_update_ts = None
    counter = 0
    prev_script_hash = None
    while True:
        # Event.wait returns True only when the exit event is set
        if cls._exit_event.wait(cls._sample_frequency if counter else cls._first_sample_frequency):
            return
        counter += 1
        # noinspection PyBroadException
        try:
            if not notebook.exists():
                continue
            # check if notebook changed since the last sample
            if last_update_ts is not None and notebook.stat().st_mtime - last_update_ts <= 0:
                continue
            last_update_ts = notebook.stat().st_mtime
            task = Task.current_task()
            if not task:
                continue

            # get notebook python script
            script_code, resources = _script_exporter.from_filename(jupyter_notebook_filename)
            current_script_hash = hash(script_code)
            # skip the expensive update when nothing changed
            if prev_script_hash and prev_script_hash == current_script_hash:
                continue

            requirements_txt = ''
            # parse jupyter python script and prepare pip requirements (pigar)
            # if backend supports requirements.
            # BUGFIX: the version check was a lexicographic string comparison
            # (`Session.api_version > '2.1'`), which misorders multi-digit
            # components (e.g. '2.10' < '2.9' as strings). Compare numeric
            # components, falling back to the old behavior for odd versions.
            # noinspection PyBroadException
            try:
                supports_requirements = \
                    tuple(int(v) for v in str(Session.api_version).split('.')) > (2, 1)
            except Exception:
                supports_requirements = Session.api_version > '2.1'
            if file_import_modules and supports_requirements:
                fmodules, _ = file_import_modules(notebook.parts[-1], script_code)
                installed_pkgs = get_installed_pkgs_detail()
                reqs = ReqsModules()
                for name in fmodules:
                    if name in installed_pkgs:
                        pkg_name, version = installed_pkgs[name]
                        reqs.add(pkg_name, version, fmodules[name])
                requirements_txt = ScriptRequirements.create_requirements_txt(reqs)

            # update script
            prev_script_hash = current_script_hash
            data_script = task.data.script
            data_script.diff = script_code
            data_script.requirements = {'pip': requirements_txt}
            task._update_script(script=data_script)
            # update requirements
            if requirements_txt:
                task._update_requirements(requirements=requirements_txt)
        except Exception:
            # best-effort daemon: never let a sampling failure kill the loop
            pass