def to_work_dir(self):
    """Change into the configured working directory, if we are not already there."""
    target = self.work_dir
    if unicode_type(target) != py3compat.getcwd():
        os.chdir(target)
        self.log.info("Changing to working dir: %s" % target)
    # This is the working dir by now; make sure it is importable.
    sys.path.insert(0, '')
def update_profiles(self):
    """Refresh self.profiles from profiles found in the ipython_dir and cwd."""
    try:
        from IPython.paths import get_ipython_dir
        from IPython.core.profileapp import list_profiles_in
    except ImportError as e:
        self.log.info("IPython not available: %s", e)
        return
    # Anything we knew about but do not see again on disk is stale.
    missing = set(self.profiles)
    for location in [get_ipython_dir(), py3compat.getcwd()]:
        for name in list_profiles_in(location):
            missing.discard(name)
            profile_dir = self.get_profile_dir(name, location)
            if name not in self.profiles:
                self.log.debug("Adding cluster profile '%s'", name)
                self.profiles[name] = {
                    'profile': name,
                    'profile_dir': profile_dir,
                    'status': 'stopped',
                }
    # Drop profiles that no longer exist on disk.
    for name in missing:
        self.log.debug("Profile '%s' no longer exists", name)
        self.profiles.pop(name)
def build_config(entrypoint, include_none=False):
    """Build the merged configuration dict for *entrypoint*.

    On-disk ``nbdime_config`` files (cwd first, then the jupyter path) are
    layered over each configurable's trait values, walking the MRO from the
    most basic class to the most derived one.

    Raises ValueError for an unknown entrypoint name.
    """
    if entrypoint not in entrypoint_configurables:
        raise ValueError('Config for entrypoint name %r is not defined! Accepted values are %r.' % (
            entrypoint, list(entrypoint_configurables.keys())
        ))

    # Load and merge every config file found on the search path:
    search_path = jupyter_config_path()
    search_path.insert(0, py3compat.getcwd())
    disk_config = {}
    for file_config in _load_config_files('nbdime_config', path=search_path):
        recursive_update(disk_config, file_config, include_none)

    merged = {}
    configurable = entrypoint_configurables[entrypoint]
    # Base classes first so derived-class settings win:
    for klass in reversed(configurable.mro()):
        if not issubclass(klass, NbdimeConfigurable):
            continue
        recursive_update(merged, config_instance(klass).configured_traits(klass), include_none)
        if klass.__name__ in disk_config:
            recursive_update(merged, disk_config[klass.__name__], include_none)
    return merged
def from_notebook_node(self, nb, resources=None, **kw):
    """Convert a notebook node to PDF.

    Exports to LaTeX first, writes it into a temporary working directory,
    then runs latex (and bibtex if needed) to produce the PDF bytes.

    Returns (pdf_data, resources) where pdf_data is the raw PDF bytes.
    Raises LatexFailed with the captured tool output when no PDF is produced.
    """
    latex, resources = super(PDFExporter, self).from_notebook_node(
        nb, resources=resources, **kw
    )
    # set texinputs directory, so that local files will be found
    if resources and resources.get('metadata', {}).get('path'):
        self.texinputs = resources['metadata']['path']
    else:
        self.texinputs = getcwd()
    # FIX: was `self._captured_outputs` (trailing 's'), but the failure
    # path below reads `self._captured_output` — the mismatched attribute
    # name raised AttributeError instead of a useful LatexFailed message.
    self._captured_output = []
    with TemporaryWorkingDirectory():
        notebook_name = 'notebook'
        resources['output_extension'] = '.tex'
        tex_file = self.writer.write(latex, resources, notebook_name=notebook_name)
        self.log.info("Building PDF")
        rc = self.run_latex(tex_file)
        if rc:
            rc = self.run_bib(tex_file)
        if rc:
            rc = self.run_latex(tex_file)
        pdf_file = notebook_name + '.pdf'
        if not os.path.isfile(pdf_file):
            raise LatexFailed('\n'.join(self._captured_output))
        self.log.info('PDF successfully created')
        with open(pdf_file, 'rb') as f:
            pdf_data = f.read()
    # convert output extension to pdf
    # the writer above required it to be tex
    resources['output_extension'] = '.pdf'
    # clear figure outputs, extracted by latex export,
    # so we don't claim to be a multi-file export.
    resources.pop('outputs', None)
    return pdf_data, resources
def _notebook_dir_default(self): if self.file_to_run: return os.path.dirname(os.path.abspath(self.file_to_run)) else: return py3compat.getcwd()
def config_file_paths(self):
    """Config search path: cwd first, then this app's config_dir, then the
    standard jupyter path."""
    search = jupyter_config_path()
    if self.config_dir not in search:
        search.insert(0, self.config_dir)
    search.insert(0, py3compat.getcwd())
    return search
def _default_root_dir(self): try: return self.parent.notebook_dir except AttributeError: return getcwd()
def _root_dir_default(self): try: return self.parent.root_dir except AttributeError: return getcwd()
def _default_root_dir(self): if self.notebook_path: return os.path.dirname(os.path.abspath(self.notebook_path)) else: return getcwd()
class BaseParallelApplication(BaseIPythonApplication):
    """The base Application for ipyparallel apps

    Principal extensions to BaseIPythonApplication:

    * work_dir
    * remote logging via pyzmq
    * IOLoop instance
    """

    # Crash handler used by the base app's crash-reporting machinery.
    crash_handler_class = ParallelCrashHandler

    def _log_level_default(self):
        # temporarily override default_log_level to INFO
        return logging.INFO

    def _log_format_default(self):
        """override default log format to include time"""
        return u"%(asctime)s.%(msecs).03d [%(name)s]%(highlevel)s %(message)s"

    # Working directory the process chdirs into at startup (see to_work_dir).
    work_dir = Unicode(py3compat.getcwd(), config=True,
        help='Set the working dir for the process.')

    def _work_dir_changed(self, name, old, new):
        """Normalize any newly-assigned work_dir (expands ~ and env vars)."""
        self.work_dir = unicode_type(expand_path(new))

    # Whether to redirect this app's logging to a per-pid file in log_dir.
    log_to_file = Bool(config=True,
        help="whether to log to a file")

    # Whether to delete this app's old log files before starting.
    clean_logs = Bool(False, config=True,
        help="whether to cleanup old logfiles before starting")

    log_url = Unicode('', config=True,
        help="The ZMQ URL of the iplogger to aggregate logging.")

    cluster_id = Unicode('', config=True,
        help="""String id to add to runtime files, to prevent name collisions when
        using multiple clusters with a single profile simultaneously.

        When set, files will be named like: 'ipcontroller-<cluster_id>-engine.json'

        Since this is text inserted into filenames, typical recommendations apply:
        Simple character strings are ideal, and spaces are not recommended (but should
        generally work).
        """)

    def _cluster_id_changed(self, name, old, new):
        """Rebuild self.name so runtime filenames include the cluster id."""
        self.name = self.__class__.name
        if new:
            self.name += '-%s' % new

    def _config_files_default(self):
        # All three parallel apps share one search list of config file names.
        return [
            'ipcontroller_config.py',
            'ipengine_config.py',
            'ipcluster_config.py'
        ]

    loop = Instance('zmq.eventloop.ioloop.IOLoop')

    def _loop_default(self):
        # Import lazily so zmq is only required once an IOLoop is needed.
        from zmq.eventloop.ioloop import IOLoop
        return IOLoop.instance()

    aliases = Dict(base_aliases)
    flags = Dict(base_flags)

    @catch_config_error
    def initialize(self, argv=None):
        """initialize the app"""
        super(BaseParallelApplication, self).initialize(argv)
        self.to_work_dir()
        self.reinit_logging()

    def to_work_dir(self):
        """chdir into self.work_dir unless we are already there."""
        wd = self.work_dir
        if unicode_type(wd) != py3compat.getcwd():
            os.chdir(wd)
            self.log.info("Changing to working dir: %s" % wd)
        # This is the working dir by now.
        sys.path.insert(0, '')

    def reinit_logging(self):
        """Re-point self.log at a per-pid logfile (optionally cleaning old ones)
        and install a timestamped formatter."""
        # Remove old log files
        log_dir = self.profile_dir.log_dir
        if self.clean_logs:
            for f in os.listdir(log_dir):
                if re.match(r'%s-\d+\.(log|err|out)' % self.name, f):
                    try:
                        os.remove(os.path.join(log_dir, f))
                    except (OSError, IOError):
                        # probably just conflict from sibling process
                        # already removing it
                        pass
        if self.log_to_file:
            # Start logging to the new log file
            log_filename = self.name + u'-' + str(os.getpid()) + u'.log'
            logfile = os.path.join(log_dir, log_filename)
            open_log_file = open(logfile, 'w')
        else:
            open_log_file = None
        if open_log_file is not None:
            # Replace all existing handlers with one writing to the log file.
            while self.log.handlers:
                self.log.removeHandler(self.log.handlers[0])
            self._log_handler = logging.StreamHandler(open_log_file)
            self.log.addHandler(self._log_handler)
        else:
            # NOTE(review): assumes at least one handler is already attached —
            # IndexError otherwise; confirm the base app always installs one.
            self._log_handler = self.log.handlers[0]
        # Add timestamps to log format:
        self._log_formatter = LevelFormatter(self.log_format,
                                             datefmt=self.log_datefmt)
        self._log_handler.setFormatter(self._log_formatter)
        # do not propagate log messages to root logger
        # ipcluster app will sometimes print duplicate messages during shutdown
        # if this is 1 (default):
        self.log.propagate = False

    def write_pid_file(self, overwrite=False):
        """Create a .pid file in the pid_dir with my pid.

        This must be called after pre_construct, which sets `self.pid_dir`.
        This raises :exc:`PIDFileError` if the pid file exists already.
        """
        pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid')
        if os.path.isfile(pid_file):
            pid = self.get_pid_from_file()
            if not overwrite:
                raise PIDFileError(
                    'The pid file [%s] already exists. \nThis could mean that this '
                    'server is already running with [pid=%s].' % (pid_file, pid))
        with open(pid_file, 'w') as f:
            self.log.info("Creating pid file: %s" % pid_file)
            f.write(repr(os.getpid()) + '\n')

    def remove_pid_file(self):
        """Remove the pid file.

        This should be called at shutdown by registering a callback with
        :func:`reactor.addSystemEventTrigger`. This needs to return
        ``None``.
        """
        pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid')
        if os.path.isfile(pid_file):
            try:
                self.log.info("Removing pid file: %s" % pid_file)
                os.remove(pid_file)
            except:
                # Best-effort cleanup: never let pid-file removal abort shutdown.
                self.log.warn("Error removing the pid file: %s" % pid_file)

    def get_pid_from_file(self):
        """Get the pid from the pid file.

        If the pid file doesn't exist a :exc:`PIDFileError` is raised.
        """
        pid_file = os.path.join(self.profile_dir.pid_dir, self.name + u'.pid')
        if os.path.isfile(pid_file):
            with open(pid_file, 'r') as f:
                s = f.read().strip()
            try:
                pid = int(s)
            except:
                # Unparseable contents: surface them in the error for debugging.
                raise PIDFileError("invalid pid file: %s (contents: %r)" % (pid_file, s))
            return pid
        else:
            raise PIDFileError('pid file not found: %s' % pid_file)

    def check_pid(self, pid):
        """Return whether *pid* appears to be running.

        Delegates to the module-level ``check_pid`` helper (which this method
        intentionally shadows by name); on any failure, assume it is running.
        """
        try:
            return check_pid(pid)
        except Exception:
            self.log.warn("Could not determine whether pid %i is running. "
                          " Making the likely assumption that it is." % pid)
            return True
def config_file_paths(self):
    """Config search order: cwd, then this app's config_dir, then system dirs."""
    paths = [py3compat.getcwd(), self.config_dir]
    paths.extend(SYSTEM_CONFIG_DIRS)
    return paths
def config_file_paths():
    """Return the jupyter config search path with the user config dir and the
    cwd pushed to the front (cwd first)."""
    search = jupyter_config_path()
    user_dir = jupyter_config_dir()
    if user_dir not in search:
        search.insert(0, user_dir)
    search.insert(0, py3compat.getcwd())
    return search