def __init__(self,
             notebook_file=None,
             output_file="fairing_output_notebook.ipynb",
             input_files=None,
             command=None,
             path_prefix=constants.DEFAULT_DEST_PREFIX,
             output_map=None):
    """Preprocessor that executes the whole notebook with papermill.

    :param notebook_file: path to the notebook to run; auto-detected when
        running inside a notebook environment.
    :param output_file: name of the executed-notebook output file.
    :param input_files: extra files to add to the context; the notebook
        itself is appended if missing.
    :param command: command to run in the container; defaults to papermill.
    :param path_prefix: destination prefix inside the container context.
    :param output_map: extra host-path -> context-path mappings.
    :raises ValueError: if no notebook file is given or detectable.
    """
    if notebook_file is None and notebook_util.is_in_notebook():
        notebook_file = notebook_util.get_notebook_name()
    if notebook_file is None:
        raise ValueError('A notebook_file must be provided.')
    relative_notebook_file = notebook_file
    # Convert an absolute notebook path to a relative one so it resolves
    # inside the container context.
    # BUG FIX: the original called os.path.isabs(notebook_file[0]), testing
    # only the first character of the path — coincidentally correct on
    # POSIX but wrong in general (e.g. Windows drive paths).
    if os.path.isabs(notebook_file):
        relative_notebook_file = os.path.relpath(notebook_file)
    if command is None:
        command = ["papermill", relative_notebook_file, output_file, "--log-output"]
    input_files = input_files or []
    if relative_notebook_file not in input_files:
        input_files.append(relative_notebook_file)
    super().__init__(executable=None,
                     input_files=input_files,
                     command=command,
                     output_map=output_map,
                     path_prefix=path_prefix)
def set_preprocessor(self, name=None, **kwargs):
    """Instantiate and store the preprocessor registered under *name*.

    When *name* is omitted, picks 'notebook' inside a notebook environment
    and the default preprocessor otherwise. *kwargs* are forwarded to the
    preprocessor constructor.
    """
    if name is None:
        name = 'notebook' if notebook_util.is_in_notebook() else DEFAULT_PREPROCESSOR
    preprocessor_cls = preprocessor_map.get(name)
    self._preprocessor = preprocessor_cls(**kwargs)
def __init__(self,
             function_obj,
             path_prefix=constants.DEFAULT_DEST_PREFIX,
             output_map=None,
             input_files=None):
    """Preprocessor that serializes a function/class with cloudpickle and
    wires up the shim needed to execute it inside the container.

    :param function_obj: the function or class object to serialize.
    :param path_prefix: destination prefix inside the container context.
    :param output_map: extra host-path -> context-path mappings.
    :param input_files: extra files to add to the context.
    :raises RuntimeError: if *function_obj* is neither a function nor a class.
    """
    super().__init__(output_map=output_map,
                     path_prefix=path_prefix,
                     input_files=input_files)
    if not notebook_util.is_in_notebook():
        logger.warning("The FunctionPreProcessor is optimized for using in a notebook or IPython environment. "
                       "For it to work, the python version should be same for both local python and the python in "
                       "the docker. Please look at alternatives like BasePreprocessor or FullNotebookPreprocessor.")
    if get_execution_obj_type(function_obj) == ObjectType.NOT_SUPPORTED:
        raise RuntimeError("Object must of type function or a class")

    # Ship the execution shim plus the fairing and cloudpickle packages so
    # the serialized payload can be loaded inside the container.
    fairing_dir = os.path.dirname(fairing.__file__)
    self.output_map[os.path.join(fairing_dir, "functions", FUNCTION_SHIM)] = \
        os.path.join(path_prefix, FUNCTION_SHIM)
    # Make sure fairing can be imported as a module
    self.output_map[os.path.join(fairing_dir, '__init__.py')] = \
        os.path.join(path_prefix, "fairing", '__init__.py')
    # Make sure cloudpickle can be imported as a module
    cloudpickle_dir = os.path.dirname(cloudpickle.__file__)
    self.output_map[os.path.join(cloudpickle_dir, '__init__.py')] = \
        os.path.join(path_prefix, "cloudpickle", '__init__.py')
    self.output_map[os.path.join(cloudpickle_dir, 'cloudpickle.py')] = \
        os.path.join(path_prefix, "cloudpickle", 'cloudpickle.py')

    # BUG FIX: mkstemp returns an open OS-level file descriptor; the
    # original discarded it and re-opened the path, leaking one fd per
    # instantiation. Wrap the fd directly instead.
    payload_fd, temp_payload_file = tempfile.mkstemp()
    with os.fdopen(payload_fd, "wb") as f:
        cloudpickle.dump(function_obj, f)
    # Adding the serialized file to the context
    payload_file_in_context = os.path.join(path_prefix, SERIALIZED_FN_FILE)
    self.output_map[temp_payload_file] = payload_file_in_context

    # TODO(@karthikv2k): Ref #122 Find a better way to support deployer specific preprocessing
    wrapper_fd, temp_payload_wrapper_file = tempfile.mkstemp()
    with os.fdopen(wrapper_fd, "w") as f:
        contents = OUTPUT_FILE.format(OBJ_NAME=function_obj.__name__,
                                      SERIALIZED_FN_FILE=SERIALIZED_FN_FILE)
        f.write(contents)
    # Adding the serialized file to the context
    payload_wrapper_file_in_context = os.path.join(path_prefix, function_obj.__name__ + ".py")
    self.output_map[temp_payload_wrapper_file] = payload_wrapper_file_in_context

    # Run the shim with the local interpreter version so the container can
    # verify python-version compatibility before unpickling.
    local_python_version = ".".join([str(x) for x in sys.version_info[0:3]])
    self.command = ["python",
                    os.path.join(self.path_prefix, FUNCTION_SHIM),
                    "--serialized_fn_file", payload_file_in_context,
                    "--python_version", local_python_version]
def reset(self):
    """Restore preprocessor, builder, and deployer selections to defaults."""
    in_notebook = notebook_util.is_in_notebook()
    self._preprocessor_name = 'notebook' if in_notebook else DEFAULT_PREPROCESSOR
    self._preprocessor_kwargs = {}
    self._builder_name = DEFAULT_BUILDER
    self._builder_kwargs = {}
    self._deployer_name = DEFAULT_DEPLOYER
    self._deployer_kwargs = {}
def __init__(self,
             notebook_file=None,
             command=None,
             path_prefix=constants.DEFAULT_DEST_PREFIX,
             output_map=None):
    """Preprocessor that executes the notebook in place via nbconvert.

    :param notebook_file: notebook to execute; auto-detected in a notebook
        environment.
    :param command: command to run; defaults to `jupyter nbconvert --execute`.
    :param path_prefix: destination prefix inside the container context.
    :param output_map: extra host-path -> context-path mappings.
    """
    # BUG FIX: `command` previously used a mutable list as its default,
    # which is shared across every call; use a None sentinel instead.
    if command is None:
        command = ["jupyter", "nbconvert", "--stdout", "--to", "notebook", "--execute"]
    if notebook_file is None and notebook_util.is_in_notebook():
        notebook_file = notebook_util.get_notebook_name()
    super().__init__(executable=notebook_file,
                     input_files=[notebook_file],
                     command=command,
                     output_map=output_map,
                     path_prefix=path_prefix)
def __init__(self,
             notebook_file=None,
             notebook_preprocessor=FilterMagicCommands,
             executable=None,
             command=None,
             path_prefix=constants.DEFAULT_DEST_PREFIX,
             output_map=None):
    """Preprocessor that converts a notebook through *notebook_preprocessor*.

    :param notebook_file: notebook to convert; auto-detected in a notebook
        environment.
    :param notebook_preprocessor: nbconvert preprocessor class applied to
        the notebook cells.
    :param executable: executable forwarded to the base preprocessor.
    :param command: interpreter command (unused here beyond the signature);
        defaults to ["python"].
    :param path_prefix: destination prefix inside the container context.
    :param output_map: extra host-path -> context-path mappings.
    """
    # BUG FIX: the defaults `command=["python"]` and `output_map={}` were
    # mutable objects shared across every instantiation; use None sentinels
    # and build fresh objects per call (behavior otherwise unchanged — a
    # fresh empty dict is still handed to the base class).
    if command is None:
        command = ["python"]
    if output_map is None:
        output_map = {}
    super().__init__(executable=executable,
                     input_files=[],
                     output_map=output_map,
                     path_prefix=path_prefix)
    if notebook_file is None and notebook_util.is_in_notebook():
        notebook_file = notebook_util.get_notebook_name()
    self.notebook_file = notebook_file
    self.notebook_preprocessor = notebook_preprocessor
def __init__(self,
             notebook_file=None,
             notebook_preprocessor=FilterMagicCommands,
             executable=None,
             command=None,
             path_prefix=constants.DEFAULT_DEST_PREFIX,
             output_map=None,
             overwrite=True):
    """Preprocessor that converts a notebook, optionally overwriting output.

    :param notebook_file: notebook to convert; auto-detected in a notebook
        environment.
    :param notebook_preprocessor: nbconvert preprocessor class applied to
        the notebook cells.
    :param executable: executable forwarded to the base preprocessor.
    :param command: interpreter command (kept for signature compatibility);
        defaults to ["python"].
    :param path_prefix: destination prefix inside the container context.
    :param output_map: extra host-path -> context-path mappings.
    :param overwrite: whether existing converted output may be overwritten.
    """
    # BUG FIX: `command=["python"]` was a shared mutable default (the old
    # pylint disable acknowledged it); use a None sentinel instead so each
    # call gets a fresh list.
    if command is None:
        command = ["python"]
    super().__init__(executable=executable,
                     input_files=[],
                     output_map=output_map,
                     path_prefix=path_prefix)
    if notebook_file is None and notebook_util.is_in_notebook():
        notebook_file = notebook_util.get_notebook_name()
    self.notebook_file = notebook_file
    self.notebook_preprocessor = notebook_preprocessor
    self.overwrite = overwrite
def __init__(self,
             notebook_file=None,
             input_files=None,
             command=None,
             path_prefix=constants.DEFAULT_DEST_PREFIX,
             output_map=None):
    """Preprocessor that executes the notebook with nbconvert, tolerating
    cell errors and disabling the execution timeout.

    :param notebook_file: notebook to execute; auto-detected in a notebook
        environment.
    :param input_files: extra files to add to the context; the notebook
        itself is appended if missing.
    :param command: command to run; defaults to `jupyter nbconvert
        --execute --allow-errors` with no timeout.
    :param path_prefix: destination prefix inside the container context.
    :param output_map: extra host-path -> context-path mappings.
    """
    # BUG FIX: `command` previously used a mutable list default shared
    # across calls; use the same None-sentinel idiom this method already
    # applies to `input_files`.
    if command is None:
        command = [
            "jupyter", "nbconvert", "--stdout", "--to", "notebook",
            "--execute", "--allow-errors", "--ExecutePreprocessor.timeout=-1"
        ]
    if notebook_file is None and notebook_util.is_in_notebook():
        notebook_file = notebook_util.get_notebook_name()
    input_files = input_files or []
    if notebook_file not in input_files:
        input_files.append(notebook_file)
    super().__init__(executable=notebook_file,
                     input_files=input_files,
                     command=command,
                     output_map=output_map,
                     path_prefix=path_prefix)