class RevealServer(LoggingConfigurable):
    """Post processor designed to serve files

    Proxies reveal.js requests to a CDN if no local reveal.js is present
    """

    open_in_browser = Bool(True,
        help="""Should the browser be opened automatically?"""
    ).tag(config=True)

    reveal_cdn = Unicode("https://cdnjs.cloudflare.com/ajax/libs/reveal.js/3.1.0",
        help="""URL for reveal.js CDN."""
    ).tag(config=True)

    reveal_prefix = Unicode("reveal.js",
        help="URL prefix for reveal.js").tag(config=True)

    ip = Unicode("127.0.0.1",
        help="The IP address to listen on.").tag(config=True)

    port = Int(8000,
        help="port for the server to listen on.").tag(config=True)

    def serve(self, input):
        """Serve the build directory with a webserver.

        Parameters
        ----------
        input : str
            Path of the HTML file to serve.

        Raises
        ------
        IOError
            If ``input`` does not exist, or if no free port is found after
            ten attempts.
        """
        # FIX: the original logged each message twice (once through self.log
        # and once through the root ``logging`` module); all logging now goes
        # through self.log, with lazy %-style arguments.
        if not os.path.exists(input):
            self.log.error('the html path does not exist: %s', input)
            raise IOError('the html path does not exist: {}'.format(input))

        dirname, filename = os.path.split(input)

        handlers = [
            (r"/(.+)", web.StaticFileHandler, {'path': dirname}),
            (r"/", web.RedirectHandler, {"url": "/%s" % filename})
        ]

        if '://' in self.reveal_prefix or self.reveal_prefix.startswith("//"):
            # reveal specifically from CDN, nothing to do
            pass
        elif os.path.isdir(os.path.join(dirname, self.reveal_prefix)):
            # a local reveal.js directory sits next to the slides; the
            # StaticFileHandler above will serve it
            self.log.info("Serving local %s", self.reveal_prefix)
        else:
            # no local copy: proxy reveal.js requests to the configured CDN
            self.log.info("Redirecting %s requests to %s",
                          self.reveal_prefix, self.reveal_cdn)
            handlers.insert(0, (r"/(%s)/(.*)" % self.reveal_prefix, ProxyHandler))

        app = web.Application(handlers,
                              cdn=self.reveal_cdn,
                              client=AsyncHTTPClient(),
                              )

        # hook up tornado logging to our logger
        log.app_log = self.log

        http_server = httpserver.HTTPServer(app)

        # find an available port: try self.port first, then the next 9 ports
        port_attempts = list(range(10))
        for port_attempt in port_attempts:
            try:
                url = "http://%s:%i/%s" % (self.ip, self.port, filename)
                self.log.info("Attempting to serve at %s", url)
                http_server.listen(self.port, address=self.ip)
                break
            except IOError:
                self.port += 1
            if port_attempt == port_attempts[-1]:
                self.log.error(
                    'no port available to launch slides on, '
                    'try closing some slideshows')
                raise IOError('no port available to launch slides on, '
                              'try closing some slideshows')

        self.log.info("Serving your slides at %s", url)
        self.log.info("Use Control-C to stop this server")

        # don't let people press ctrl-z, which leaves port open
        def handler(signum, frame):
            self.log.info('Control-Z pressed, but ignored, use Control-C!')
        signal.signal(signal.SIGTSTP, handler)

        if self.open_in_browser:
            webbrowser.open(url, new=2)

        try:
            ioloop.IOLoop.instance().start()
        except KeyboardInterrupt:
            # ioloop.IOLoop.instance().stop() doesn't look like this is necessary
            self.log.info("\nInterrupted")
class Figure(ipywebrtc.MediaStream):
    """Widget class representing a volume (rendering) using three.js"""

    # Standard ipywidgets model/view wiring for the 'ipyvolume' frontend.
    _view_name = Unicode('FigureView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('FigureModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)

    # Eye separation used for the stereo rendering mode below.
    eye_separation = traitlets.CFloat(6.4).tag(sync=True)

    # Collections of plot objects rendered in this figure; synced to the
    # frontend with the ipywidgets widget serializers.
    scatters = traitlets.List(traitlets.Instance(Scatter), [], allow_none=False).tag(sync=True, **ipywidgets.widget_serialization)
    meshes = traitlets.List(traitlets.Instance(Mesh), [], allow_none=False).tag(sync=True, **ipywidgets.widget_serialization)
    volumes = traitlets.List(traitlets.Instance(Volume), [], allow_none=False).tag(sync=True, **ipywidgets.widget_serialization)

    # Animation timing parameters (interpreted by the frontend).
    animation = traitlets.Float(1000.0).tag(sync=True)
    animation_exponent = traitlets.Float(1.0).tag(sync=True)

    # Lighting coefficients for the rendering shaders.
    ambient_coefficient = traitlets.Float(0.5).tag(sync=True)
    diffuse_coefficient = traitlets.Float(0.8).tag(sync=True)
    specular_coefficient = traitlets.Float(0.5).tag(sync=True)
    specular_exponent = traitlets.Float(5).tag(sync=True)

    stereo = traitlets.Bool(False).tag(sync=True)

    camera_control = traitlets.Unicode(default_value='trackball').tag(sync=True)
    # Vertical field of view in degrees; bounded away from the degenerate
    # 0 and 180 degree cases.
    camera_fov = traitlets.CFloat(45,min=0.1,max=179.9).tag(sync=True)
    camera_center = traitlets.List(traitlets.CFloat, default_value=[0, 0, 0]).tag(sync=True) #Tuple(traitlets.CFloat(0), traitlets.CFloat(0), traitlets.CFloat(0)).tag(sync=True)
    camera = traitlets.Instance(pythreejs.Camera, allow_none=True,
                                help='A :any:`pythreejs.Camera` instance to control the camera')\
        .tag(sync=True, **ipywidgets.widget_serialization)

    @traitlets.default('camera')
    def _default_camera(self):
        """Create a perspective camera placed so the scene fills the view.

        The z distance compensates for the configured field of view.
        """
        # see https://github.com/maartenbreddels/ipyvolume/pull/40 for an explanation
        z = 2 * np.tan(45./2.*np.pi/180) / np.tan(self.camera_fov/2.*np.pi/180)
        return pythreejs.PerspectiveCamera(fov=self.camera_fov, position=(0, 0, z), width=400, height=500)

    scene = traitlets.Instance(pythreejs.Scene, allow_none=True).tag(sync=True, **ipywidgets.widget_serialization)

    @traitlets.default('scene')
    def _default_scene(self):
        """Default three.js scene with a transparent background."""
        # could be removed when https://github.com/jovyan/pythreejs/issues/176 is solved
        # the default for pythreejs is white, which leads the volume rendering
        # pass to make everything white
        return pythreejs.Scene(background=None)

    # Canvas size (pixels) and render scaling factors.
    width = traitlets.CInt(500).tag(sync=True)
    height = traitlets.CInt(400).tag(sync=True)
    downscale = traitlets.CInt(1).tag(sync=True)
    displayscale = traitlets.CFloat(1).tag(sync=True)
    # Frame rate for media capture; None lets the frontend decide.
    capture_fps = traitlets.CFloat(None, allow_none=True).tag(sync=True)
    cube_resolution = traitlets.CInt(512).tag(sync=True)
    show = traitlets.Unicode("Volume").tag(sync=True)  # for debugging

    # Axis limits: [min, max] per axis.
    xlim = traitlets.List(traitlets.CFloat, default_value=[0, 1], minlen=2, maxlen=2).tag(sync=True)
    ylim = traitlets.List(traitlets.CFloat, default_value=[0, 1], minlen=2, maxlen=2).tag(sync=True)
    zlim = traitlets.List(traitlets.CFloat, default_value=[0, 1], minlen=2, maxlen=2).tag(sync=True)

    # 4x4 camera matrices, flattened to 16 floats; the frontend keeps these
    # up to date and project() below consumes them (row/column order is
    # handled by the transposes in project()).
    matrix_projection = traitlets.List(traitlets.CFloat, default_value=[0] * 16, allow_none=True, minlen=16, maxlen=16).tag(sync=True)
    matrix_world = traitlets.List(traitlets.CFloat, default_value=[0] * 16, allow_none=True, minlen=16, maxlen=16).tag(sync=True)

    xlabel = traitlets.Unicode("x").tag(sync=True)
    ylabel = traitlets.Unicode("y").tag(sync=True)
    zlabel = traitlets.Unicode("z").tag(sync=True)

    style = traitlets.Dict(default_value=ipyvolume.styles.default).tag(sync=True)

    render_continuous = traitlets.Bool(False).tag(sync=True)
    selector = traitlets.Unicode(default_value='lasso').tag(sync=True)
    selection_mode = traitlets.Unicode(default_value='replace').tag(sync=True)
    mouse_mode = traitlets.Unicode(default_value='normal').tag(sync=True)
    panorama_mode = traitlets.Enum(values=['no', '360', '180'], default_value='no').tag(sync=True)

    #xlim = traitlets.Tuple(traitlets.CFloat(0), traitlets.CFloat(1)).tag(sync=True)
    #y#lim = traitlets.Tuple(traitlets.CFloat(0), traitlets.CFloat(1)).tag(sync=True)
    #zlim = traitlets.Tuple(traitlets.CFloat(0), traitlets.CFloat(1)).tag(sync=True)

    def __init__(self, **kwargs):
        """Forward kwargs to the widget base and wire up frontend messages."""
        super(Figure, self).__init__(**kwargs)
        # Dispatchers fan out frontend events to registered Python callbacks.
        self._screenshot_handlers = widgets.CallbackDispatcher()
        self._selection_handlers = widgets.CallbackDispatcher()
        self.on_msg(self._handle_custom_msg)

    def __enter__(self):
        """Sets this figure as the current in the pylab API

        Example:
        >>> f1 = ipv.figure(1)
        >>> f2 = ipv.figure(2)
        >>> with f1:
        >>>  ipv.scatter(x, y, z)
        >>> assert ipv.gcf() is f2
        """
        self._previous_figure = ipv.gcf()
        ipv.figure(self)

    def __exit__(self, type, value, traceback):
        # Restore whichever figure was current before entering the context.
        ipv.figure(self._previous_figure)
        del self._previous_figure

    def screenshot(self, width=None, height=None, mime_type='image/png'):
        """Ask the frontend to take a screenshot (result arrives via
        the 'screenshot' custom message, see on_screenshot)."""
        self.send({'msg':'screenshot', 'width':width, 'height':height, 'mime_type':mime_type})

    def on_screenshot(self, callback, remove=False):
        """Register (or with remove=True, unregister) a screenshot callback."""
        self._screenshot_handlers.register_callback(callback, remove=remove)

    def _handle_custom_msg(self, content, buffers):
        # Route frontend events to the matching callback dispatcher.
        if content.get('event', '') == 'screenshot':
            self._screenshot_handlers(content['data'])
        elif content.get('event', '') == 'selection':
            self._selection_handlers(content['data'])

    def on_selection(self, callback, remove=False):
        """Register (or with remove=True, unregister) a selection callback."""
        self._selection_handlers.register_callback(callback, remove=remove)

    def project(self, x, y, z):
        """Project world coordinates to normalized 2d screen coordinates.

        Applies the synced world and projection matrices to the homogeneous
        points (x, y, z, 1) and performs the perspective divide.
        Returns the (2, ...) array of xy screen coordinates.
        """
        W = np.matrix(self.matrix_world).reshape((4,4))     .T
        P = np.matrix(self.matrix_projection).reshape((4,4)).T
        M = np.dot(P, W)
        x = np.asarray(x)
        vertices = np.array([x, y, z, np.ones(x.shape)])
        screen_h = np.tensordot(M, vertices, axes=(1, 0))
        xy = screen_h[:2] / screen_h[3]
        return xy
class RasterLayer(Layer):
    """Base class for raster map layers (synced with the Leaflet frontend)."""
    _view_name = Unicode('LeafletRasterLayerView').tag(sync=True)
    _model_name = Unicode('LeafletRasterLayerModel').tag(sync=True)
    # Layer opacity, clamped to [0.0 (transparent), 1.0 (opaque)].
    opacity = Float(1.0, min=0.0, max=1.0).tag(sync=True)
    # Whether the layer is currently shown on the map.
    visible = Bool(True).tag(sync=True)
class ColorMapping(PluginBlock):
    """Plugin block mapping a scalar input component to a colormap.

    Exposes the colormap name and its [min, max] bounds as synced traits,
    plus ipywidgets controls to edit them interactively.
    """

    _view_name = Unicode('ColorMappingView').tag(sync=True)
    _model_name = Unicode('ColorMappingModel').tag(sync=True)

    # This block consumes a 1-dimensional (scalar) input component.
    _input_data_dim = Int(1)

    colormap = Enum(('viridis', 'plasma', 'magma', 'inferno'), default_value='viridis').tag(sync=True)
    colormap_min = Float().tag(sync=True)
    colormap_max = Float().tag(sync=True)

    def interact(self):
        """Return an HBox of the parent controls plus the colormap widgets.

        Widgets are created lazily on first call.
        """
        if not self.initialized_widgets:
            self._init_colormapping_widgets()
            self.initialized_widgets = True

        return HBox(
            self._interact() +
            (VBox((self.colormap_wid, self.colormapslider_wid)), )
        )

    def __init__(self, *args, **kwargs):
        super(ColorMapping, self).__init__(*args, **kwargs)
        # Widgets are built lazily in interact() to avoid paying the cost
        # when the block is used non-interactively.
        self.initialized_widgets = False
        self.colormap_wid = None
        self.colormapslider_wid = None

    def _init_colormapping_widgets(self):
        """Build the colormap dropdown and range slider, and link them."""
        self.colormap_wid = Dropdown(
            description='Colormap',
            options=['viridis', 'plasma', 'magma', 'inferno'],
            value=self.colormap
        )

        self.colormap_wid.layout.width = 'fit-content'

        # FIX: renamed locals from min/max, which shadowed the builtins.
        vmin, vmax = self._get_component_min_max(
            self.input_data, self.input_components[0])

        self.colormapslider_wid = FloatRangeSlider(
            value=[self.colormap_min, self.colormap_max],
            min=vmin,
            max=vmax,
            description="Colormap bounds"
        )

        def on_range_change(change):
            self.colormap_min = change['new'][0]
            self.colormap_max = change['new'][1]

        self.colormapslider_wid.observe(on_range_change, 'value')

        link((self.colormap_wid, 'value'), (self, 'colormap'))

    @observe('input_components')
    def _on_input_components_change(self, change):
        """Reset the colormap bounds when the selected component changes."""
        vmin, vmax = self._get_component_min_max(
            self.input_data, self.input_components[0])

        self.colormap_min = vmin
        self.colormap_max = vmax

        if self.initialized_widgets:
            # NOTE(review): assigning min before max can transiently leave
            # min > max on the slider if the new range is entirely above the
            # old one — behavior kept as-is; confirm against ipywidgets.
            self.colormapslider_wid.min = vmin
            self.colormapslider_wid.max = vmax
            self.colormapslider_wid.value = [vmin, vmax]
class PointCloud(PluginBlock):
    """Plugin block rendering the mesh as a cloud of points.

    Exposes point size, the fraction of points drawn, their distribution,
    and (for volumetric meshes) a volume/surface mode, all as synced traits
    with matching ipywidgets controls.
    """

    _view_name = Unicode('PointCloudView').tag(sync=True)
    _model_name = Unicode('PointCloudModel').tag(sync=True)

    # Point size in (presumably) pixels — TODO confirm against the frontend.
    points_size = Float(3.).tag(sync=True)
    # Fraction of points actually drawn, in [0, 1].
    percentage_points = Float(1.).tag(sync=True)
    distribution = Enum(('ordered', 'random'), default_value='ordered').tag(sync=True)
    mode = Enum(('volume', 'surface'), default_value='volume').tag(sync=True)

    def interact(self):
        """Return an HBox of the parent controls plus the point-cloud widgets.

        Widgets are created lazily on first call; the mode toggle is only
        present for volumetric (tetrahedron-bearing) meshes.
        """
        if not self.initialized_widgets:
            self._init_pointcloud_widgets()
            self.initialized_widgets = True

        widgets = (
            self.points_size_wid,
            self.percentage_points_wid,
            self.distribution_wid
        )

        if self.mode_wid is not None:
            widgets = widgets + (self.mode_wid, )

        return HBox(
            self._interact() +
            (VBox(widgets), )
        )

    def __init__(self, *args, **kwargs):
        super(PointCloud, self).__init__(*args, **kwargs)
        # Widgets are built lazily in interact().
        self.initialized_widgets = False
        self.points_size_wid = None
        self.percentage_points_wid = None
        self.distribution_wid = None
        self.mode_wid = None

    def _init_pointcloud_widgets(self):
        """Build the point-cloud widgets and link them to the traits."""
        self.points_size_wid = FloatSlider(
            description='Size',
            min=1.,
            max=20.,
            value=self.points_size
        )

        self.percentage_points_wid = FloatSlider(
            description='Nb points',
            step=0.01,
            min=0.0,
            max=1.0,
            value=self.percentage_points,
            readout_format='.2%'
        )

        self.distribution_wid = ToggleButtons(
            description='Distribution',
            options=['ordered', 'random'],
            value=self.distribution
        )

        # Check if it's a volumetric mesh: walk up the block chain to the
        # DataBlock and look for tetrahedrons.
        block = self
        while not isinstance(block, DataBlock):
            block = block._parent_block

        if len(block.mesh.tetrahedrons) != 0:
            self.mode_wid = ToggleButtons(
                description='Mode',
                options=['volume', 'surface'],
                value=self.mode
            )
            link((self, 'mode'), (self.mode_wid, 'value'))

        link((self, 'points_size'), (self.points_size_wid, 'value'))
        link((self, 'percentage_points'), (self.percentage_points_wid, 'value'))
        link((self, 'distribution'), (self.distribution_wid, 'value'))

    def _validate_parent(self, parent):
        """Reject stacking this effect on a VectorField or another PointCloud.

        Walks the parent chain up to the DataBlock and raises RuntimeError
        if a conflicting effect is found.
        """
        block = parent
        while not isinstance(block, DataBlock):
            if isinstance(block, VectorField) or isinstance(block, PointCloud):
                raise RuntimeError('Cannot apply a PointCloud after a VectorField effect or a PointCloud effect')
            block = block._parent_block
class ContentsManager(LoggingConfigurable):
    """Base class for serving files and directories.

    This serves any text or binary file,
    as well as directories,
    with special handling for JSON notebook documents.

    Most APIs take a path argument,
    which is always an API-style unicode path,
    and always refers to a directory.

    - unicode, not url-escaped
    - '/'-separated
    - leading and trailing '/' will be stripped
    - if unspecified, path defaults to '',
      indicating the root path.

    """

    notary = Instance(sign.NotebookNotary)
    def _notary_default(self):
        return sign.NotebookNotary(parent=self)

    hide_globs = List(Unicode(), [
            u'__pycache__', '*.pyc', '*.pyo',
            '.DS_Store', '*.so', '*.dylib', '*~',
        ], config=True, help="""
        Glob patterns to hide in file and directory listings.
    """)

    untitled_notebook = Unicode(
        "Untitled",
        config=True,
        help="The base name used when creating untitled notebooks.")

    untitled_file = Unicode(
        "untitled",
        config=True,
        help="The base name used when creating untitled files.")

    untitled_directory = Unicode(
        "Untitled Folder",
        config=True,
        help="The base name used when creating untitled directories.")

    pre_save_hook = Any(None, config=True, allow_none=True,
        help="""Python callable or importstring thereof

        To be called on a contents model prior to save.

        This can be used to process the structure,
        such as removing notebook outputs or other side effects that
        should not be saved.

        It will be called as (all arguments passed by keyword)::

            hook(path=path, model=model, contents_manager=self)

        - model: the model to be saved. Includes file contents.
          Modifying this dict will affect the file that is stored.
        - path: the API path of the save destination
        - contents_manager: this ContentsManager instance
        """
    )

    def _pre_save_hook_changed(self, name, old, new):
        # Accept an import string and resolve it to the actual callable.
        if new and isinstance(new, string_types):
            self.pre_save_hook = import_item(self.pre_save_hook)
        elif new:
            if not callable(new):
                raise TraitError("pre_save_hook must be callable")

    def run_pre_save_hook(self, model, path, **kwargs):
        """Run the pre-save hook if defined, and log errors"""
        if self.pre_save_hook:
            try:
                self.log.debug("Running pre-save hook on %s", path)
                self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs)
            except Exception:
                # Hook failures must not block the save itself.
                self.log.error("Pre-save hook failed on %s", path, exc_info=True)

    checkpoints_class = Type(Checkpoints, config=True)
    checkpoints = Instance(Checkpoints, config=True)
    checkpoints_kwargs = Dict(config=True)

    def _checkpoints_default(self):
        return self.checkpoints_class(**self.checkpoints_kwargs)

    def _checkpoints_kwargs_default(self):
        return dict(
            parent=self,
            log=self.log,
        )

    # ContentsManager API part 1: methods that must be
    # implemented in subclasses.

    def dir_exists(self, path):
        """Does a directory exist at the given path?

        Like os.path.isdir

        Override this method in subclasses.

        Parameters
        ----------
        path : string
            The path to check

        Returns
        -------
        exists : bool
            Whether the path does indeed exist.
        """
        raise NotImplementedError

    def is_hidden(self, path):
        """Is path a hidden directory or file?

        Parameters
        ----------
        path : string
            The path to check. This is an API path (`/` separated,
            relative to root dir).

        Returns
        -------
        hidden : bool
            Whether the path is hidden.
        """
        raise NotImplementedError

    def file_exists(self, path=''):
        """Does a file exist at the given path?

        Like os.path.isfile

        Override this method in subclasses.

        Parameters
        ----------
        path : string
            The API path of a file to check for.

        Returns
        -------
        exists : bool
            Whether the file exists.
        """
        raise NotImplementedError('must be implemented in a subclass')

    def exists(self, path):
        """Does a file or directory exist at the given path?

        Like os.path.exists

        Parameters
        ----------
        path : string
            The API path of a file or directory to check for.

        Returns
        -------
        exists : bool
            Whether the target exists.
        """
        return self.file_exists(path) or self.dir_exists(path)

    def get(self, path, content=True, type=None, format=None):
        """Get a file or directory model."""
        raise NotImplementedError('must be implemented in a subclass')

    def save(self, model, path):
        """
        Save a file or directory model to path.

        Should return the saved model with no content.  Save implementations
        should call self.run_pre_save_hook(model=model, path=path) prior to
        writing any data.
        """
        raise NotImplementedError('must be implemented in a subclass')

    def delete_file(self, path):
        """Delete the file or directory at path."""
        raise NotImplementedError('must be implemented in a subclass')

    def rename_file(self, old_path, new_path):
        """Rename a file or directory."""
        raise NotImplementedError('must be implemented in a subclass')

    # ContentsManager API part 2: methods that have useable default
    # implementations, but can be overridden in subclasses.

    def delete(self, path):
        """Delete a file/directory and any associated checkpoints."""
        path = path.strip('/')
        if not path:
            raise HTTPError(400, "Can't delete root")
        self.delete_file(path)
        self.checkpoints.delete_all_checkpoints(path)

    def rename(self, old_path, new_path):
        """Rename a file and any checkpoints associated with that file."""
        self.rename_file(old_path, new_path)
        self.checkpoints.rename_all_checkpoints(old_path, new_path)

    def update(self, model, path):
        """Update the file's path

        For use in PATCH requests, to enable renaming a file without
        re-uploading its contents. Only used for renaming at the moment.
        """
        path = path.strip('/')
        new_path = model.get('path', path).strip('/')
        if path != new_path:
            self.rename(path, new_path)
        model = self.get(new_path, content=False)
        return model

    def info_string(self):
        return "Serving contents"

    def get_kernel_path(self, path, model=None):
        """Return the API path for the kernel

        KernelManagers can turn this value into a filesystem path,
        or ignore it altogether.

        The default value here will start kernels in the directory of the
        notebook server. FileContentsManager overrides this to use the
        directory containing the notebook.
        """
        return ''

    def increment_filename(self, filename, path='', insert=''):
        """Increment a filename until it is unique.

        Parameters
        ----------
        filename : unicode
            The name of a file, including extension
        path : unicode
            The API path of the target's directory

        Returns
        -------
        name : unicode
            A filename that is unique, based on the input filename.
        """
        path = path.strip('/')
        basename, ext = os.path.splitext(filename)
        for i in itertools.count():
            if i:
                insert_i = '{}{}'.format(insert, i)
            else:
                insert_i = ''
            name = u'{basename}{insert}{ext}'.format(basename=basename,
                insert=insert_i, ext=ext)
            if not self.exists(u'{}/{}'.format(path, name)):
                break
        return name

    def validate_notebook_model(self, model):
        """Add failed-validation message to model"""
        try:
            validate(model['content'])
        except ValidationError as e:
            model['message'] = u'Notebook Validation failed: {}:\n{}'.format(
                e.message, json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>'),
            )
        return model

    def new_untitled(self, path='', type='', ext=''):
        """Create a new untitled file or directory in path

        path must be a directory

        File extension can be specified.

        Use `new` to create files with a fully specified path (including filename).
        """
        path = path.strip('/')
        if not self.dir_exists(path):
            raise HTTPError(404, 'No such directory: %s' % path)

        model = {}
        if type:
            model['type'] = type

        if ext == '.ipynb':
            model.setdefault('type', 'notebook')
        else:
            model.setdefault('type', 'file')

        insert = ''
        if model['type'] == 'directory':
            untitled = self.untitled_directory
            insert = ' '
        elif model['type'] == 'notebook':
            untitled = self.untitled_notebook
            ext = '.ipynb'
        elif model['type'] == 'file':
            untitled = self.untitled_file
        else:
            raise HTTPError(400, "Unexpected model type: %r" % model['type'])

        name = self.increment_filename(untitled + ext, path, insert=insert)
        path = u'{0}/{1}'.format(path, name)
        return self.new(model, path)

    def new(self, model=None, path=''):
        """Create a new file or directory and return its model with no content.

        To create a new untitled entity in a directory, use `new_untitled`.
        """
        path = path.strip('/')
        if model is None:
            model = {}

        if path.endswith('.ipynb'):
            model.setdefault('type', 'notebook')
        else:
            model.setdefault('type', 'file')

        # no content, not a directory, so fill out new-file model
        if 'content' not in model and model['type'] != 'directory':
            if model['type'] == 'notebook':
                model['content'] = new_notebook()
                model['format'] = 'json'
            else:
                model['content'] = ''
                model['type'] = 'file'
                model['format'] = 'text'

        model = self.save(model, path)
        return model

    def copy(self, from_path, to_path=None):
        """Copy an existing file and return its new model.

        If to_path not specified, it will be the parent directory of from_path.
        If to_path is a directory, filename will increment `from_path-Copy#.ext`.

        from_path must be a full path to a file.
        """
        path = from_path.strip('/')
        if to_path is not None:
            to_path = to_path.strip('/')

        if '/' in path:
            from_dir, from_name = path.rsplit('/', 1)
        else:
            from_dir = ''
            from_name = path

        model = self.get(path)
        model.pop('path', None)
        model.pop('name', None)
        if model['type'] == 'directory':
            raise HTTPError(400, "Can't copy directories")

        if to_path is None:
            to_path = from_dir
        if self.dir_exists(to_path):
            name = copy_pat.sub(u'.', from_name)
            to_name = self.increment_filename(name, to_path, insert='-Copy')
            to_path = u'{0}/{1}'.format(to_path, to_name)

        model = self.save(model, to_path)
        return model

    def log_info(self):
        self.log.info(self.info_string())

    def trust_notebook(self, path):
        """Explicitly trust a notebook

        Parameters
        ----------
        path : string
            The path of a notebook
        """
        model = self.get(path)
        nb = model['content']
        # FIX: Logger.warn is a deprecated alias of Logger.warning.
        self.log.warning("Trusting notebook %s", path)
        self.notary.mark_cells(nb, True)
        self.save(model, path)

    def check_and_sign(self, nb, path=''):
        """Check for trusted cells, and sign the notebook.

        Called as a part of saving notebooks.

        Parameters
        ----------
        nb : dict
            The notebook dict
        path : string
            The notebook's path (for logging)
        """
        if self.notary.check_cells(nb):
            self.notary.sign(nb)
        else:
            self.log.warning("Saving untrusted notebook %s", path)

    def mark_trusted_cells(self, nb, path=''):
        """Mark cells as trusted if the notebook signature matches.

        Called as a part of loading notebooks.

        Parameters
        ----------
        nb : dict
            The notebook object (in current nbformat)
        path : string
            The notebook's path (for logging)
        """
        trusted = self.notary.check_signature(nb)
        if not trusted:
            self.log.warning("Notebook %s is not trusted", path)
        self.notary.mark_cells(nb, trusted)

    def should_list(self, name):
        """Should this file/directory name be displayed in a listing?"""
        return not any(fnmatch(name, glob) for glob in self.hide_globs)

    # Part 3: Checkpoints API
    def create_checkpoint(self, path):
        """Create a checkpoint."""
        return self.checkpoints.create_checkpoint(self, path)

    def restore_checkpoint(self, checkpoint_id, path):
        """
        Restore a checkpoint.
        """
        self.checkpoints.restore_checkpoint(self, checkpoint_id, path)

    def list_checkpoints(self, path):
        return self.checkpoints.list_checkpoints(path)

    def delete_checkpoint(self, checkpoint_id, path):
        return self.checkpoints.delete_checkpoint(checkpoint_id, path)
class DataBlock(Block):
    """Root block of an effect chain, holding the mesh the effects act on."""
    _view_name = Unicode('DataBlockView').tag(sync=True)
    _model_name = Unicode('DataBlockModel').tag(sync=True)

    # The mesh data, synced to the frontend with the widget serializers.
    mesh = Instance(Mesh).tag(sync=True, **widget_serialization)
class PedestalMaker(Component):
    """Component that accumulates events into a TargetCalib pedestal file."""
    name = 'PedestalMaker'

    output_path = Unicode(None, allow_none=True,
                          help='Path to save the TargetCalib pedestal '
                               'file').tag(config=True)
    n_tms = Int(32,
                help='Number of TARGET modules connected').tag(config=True)
    n_blocks = Int(512,
                   help='Number of blocks').tag(config=True)
    n_samples = Int(96,
                    help='Number of samples').tag(config=True)
    diagnosis = Bool(False,
                     help='Run diagnosis while creating '
                          'file?').tag(config=True)
    std = Bool(False,
               help='Track standard deviation while creating '
                    'file?').tag(config=True)
    compress = Bool(False,
                    help='Compress the pedestal file? (store in uint16 '
                         'instead of floats').tag(config=True)
    stddev = Bool(False,
                  help='Create a numpy file containing the standard '
                       'deviation of the pedestal').tag(config=True)

    def __init__(self, config, tool, **kwargs):
        """
        Generator of Pedestal files.

        Parameters
        ----------
        config : traitlets.loader.Config
            Configuration specified by config file or cmdline arguments.
            Used to set traitlet values.
            Set to None if no configuration to pass.
        tool : ctapipe.core.Tool
            Tool executable that is calling this component.
            Passes the correct logger to the component.
            Set to None if no Tool to pass.
        kwargs

        Raises
        ------
        ValueError
            If no output_path has been configured.
        """
        super().__init__(config=config, parent=tool, **kwargs)
        if self.output_path is None:
            raise ValueError("Please specify an output path to save "
                             "pedestal file")

        # The underlying TargetCalib pedestal builder does the real work.
        self.ped_obj = TCPedestalMaker(self.n_tms,
                                       self.n_blocks,
                                       self.n_samples,
                                       self.diagnosis,
                                       self.std)

        # NOTE(review): the stddev tracker is disabled (dead code below), so
        # self.ped_stats stays None and the stddev branches never run.
        self.ped_stats = None
        # if self.stddev:
        #     self.ped_stats = PedestalMeanStdDev(self.n_tms * 64,
        #                                         self.n_cells)

    def add_event(self, event):
        """
        Add an event into the pedestal.

        Parameters
        ----------
        event : container
            A `ctapipe` event container
        """
        # Only telescope 0 is read out here — TODO confirm single-telescope
        # assumption holds for all callers.
        telid = 0
        waveforms = event.r0.tel[telid].adc_samples[0]
        first_cell_ids = event.r0.tel[telid].first_cell_ids
        self.ped_obj.AddEvent(waveforms, first_cell_ids)
        if self.ped_stats:
            self.ped_stats.send_waveform(waveforms, first_cell_ids)

    def save(self):
        """
        Save the pedestal file.
        """
        self.log.info("Saving pedestal to: {}".format(self.output_path))
        self.ped_obj.Save(self.output_path, self.compress)
        if self.ped_stats:
            # Write the stddev array next to the pedestal file.
            stddev_path = os.path.splitext(self.output_path)[0] + '_stddev.npy'
            self.log.info("Saving pedestal stddev to: {}".format(stddev_path))
            np.save(stddev_path, self.ped_stats.stddev)
class TFMaker(Component):
    """Component that accumulates events into a TargetCalib transfer function.

    Events must already have a pedestal subtracted (a PedestalSubtractor is
    built internally from ``pedestal_path``).
    """
    name = 'TFMaker'

    vped_list = List(Int, None, allow_none=True,
                     help='List of the vped value for each input '
                          'file').tag(config=True)
    pedestal_path = Unicode(None, allow_none=True,
                            help='Path to the pedestal file (TF requires the '
                                 'pedestal to be first subtracted before '
                                 'generating').tag(config=True)
    adc_step = Int(8,
                   help='Step in ADC that the TF file will be stored '
                        'in').tag(config=True)
    output_path = Unicode(None, allow_none=True,
                          help='Path to save the TargetCalib pedestal '
                               'file').tag(config=True)
    number_tms = Int(32,
                     help='Number of TARGET modules '
                          'connected').tag(config=True)
    vped_zero = Int(1050,
                    help='VPed value for the pedestal').tag(config=True)
    compress = Bool(False,
                    help='Compress the TF file?').tag(config=True)
    tf_input = Bool(False,
                    help='Create a numpy file containing the input TF '
                         'array before the switch of '
                         'axis').tag(config=True)

    def __init__(self, config, tool, **kwargs):
        """
        Generator of Transfer Function files.

        Parameters
        ----------
        config : traitlets.loader.Config
            Configuration specified by config file or cmdline arguments.
            Used to set traitlet values.
            Set to None if no configuration to pass.
        tool : ctapipe.core.Tool
            Tool executable that is calling this component.
            Passes the correct logger to the component.
            Set to None if no Tool to pass.
        kwargs

        Raises
        ------
        ValueError
            If vped_list, pedestal_path, or output_path is not configured.
        """
        super().__init__(config=config, parent=tool, **kwargs)
        if self.vped_list is None:
            raise ValueError("Please supply vped_list")
        if self.pedestal_path is None:
            raise ValueError("Please specify a pedestal path")
        if self.output_path is None:
            raise ValueError("Please specify an output path to save "
                             "TF file")

        # Pedestal subtraction is applied to every event before it is fed
        # into the TF builder.
        self.ped = PedestalSubtractor(config=config, tool=tool,
                                      pedestal_path=self.pedestal_path)

        vpeds = np.array(self.vped_list, dtype=np.uint16)
        self.tf_obj = TCTfMaker(vpeds, self.number_tms, self.vped_zero)

        # Cache of the vped of the file currently being processed, so the
        # TF builder's vped index is only switched when it changes.
        self.current_vped = None

    def add_event(self, event, vped):
        """
        Add an event into the transfer function.

        Parameters
        ----------
        event : container
            A `ctapipe` event container
        vped: int
            The vped of file from which the event comes from
        """
        if self.current_vped != vped:
            self.current_vped = vped
            self.tf_obj.SetVpedIndex(vped)

        # Only telescope 0 is read out here — TODO confirm single-telescope
        # assumption holds for all callers.
        telid = 0
        # FIX: removed unused locals tm/tmpix (read from event.meta but
        # never used).
        waveforms = event.r0.tel[telid].adc_samples[0]
        first_cell_ids = event.r0.tel[telid].first_cell_ids
        pedsub = np.zeros(waveforms.shape, dtype=np.float32)
        self.ped.apply(event, pedsub)
        self.tf_obj.AddEvent(pedsub, first_cell_ids)

    def save(self):
        """
        Save the transfer function file.
        """
        # FIX: docstring previously said "pedestal file".
        self.log.info("Saving transfer function to: {}".format(
            self.output_path))
        self.tf_obj.Save(self.output_path, self.adc_step, self.compress)
        if self.tf_input:
            self.save_tf_input()

    def save_tf_input(self):
        """Dump the raw TF input array and vped vector as numpy files."""
        tf_input = np.array(self.tf_obj.GetTf())
        vped_vector = np.array(self.tf_obj.GetVpedVector())
        tfinput_path = os.path.splitext(self.output_path)[0] + '_input.npy'
        vped_path = os.path.splitext(self.output_path)[0] + '_vped.npy'
        self.log.info("Saving TF input array to: {}".format(tfinput_path))
        np.save(tfinput_path, tf_input)
        self.log.info("Saving Vped vector to: {}".format(vped_path))
        np.save(vped_path, vped_vector)
class Output(DOMWidget):
    """Widget used as a context manager to display output.

    This widget can capture and display stdout, stderr, and rich output.  To use
    it, create an instance of it and display it.

    You can then use the widget as a context manager: any output produced while in the
    context will be captured and displayed in the widget instead of the standard output
    area.

    You can also use the .capture() method to decorate a function or a method. Any output
    produced by the function will then go to the output widget. This is useful for
    debugging widget callbacks, for example.

    Example::
        import ipywidgets as widgets
        from IPython.display import display
        out = widgets.Output()
        display(out)

        print('prints to output area')

        with out:
            print('prints to output widget')

        @out.capture()
        def func():
            print('prints to output widget')
    """
    _view_name = Unicode('OutputView').tag(sync=True)
    _model_name = Unicode('OutputModel').tag(sync=True)
    _view_module = Unicode('@jupyter-widgets/output').tag(sync=True)
    _model_module = Unicode('@jupyter-widgets/output').tag(sync=True)
    _view_module_version = Unicode(__jupyter_widgets_output_version__).tag(sync=True)
    _model_module_version = Unicode(__jupyter_widgets_output_version__).tag(sync=True)

    msg_id = Unicode('', help="Parent message id of messages to capture").tag(sync=True)
    outputs = TypedTuple(trait=Dict(), help="The output messages synced from the frontend.").tag(sync=True)

    def clear_output(self, *pargs, **kwargs):
        """
        Clear the content of the output widget.

        Parameters
        ----------

        wait: bool
            If True, wait to clear the output until new output is
            available to replace it. Default: False
        """
        with self:
            clear_output(*pargs, **kwargs)

    # PY3: Force passing clear_output and clear_kwargs as kwargs
    def capture(self, clear_output=False, *clear_args, **clear_kwargs):
        """
        Decorator to capture the stdout and stderr of a function.

        Parameters
        ----------

        clear_output: bool
            If True, clear the content of the output widget at every
            new function call. Default: False

        wait: bool
            If True, wait to clear the output until new output is
            available to replace it. This is only used if clear_output
            is also True. Default: False
        """
        def capture_decorator(func):
            @wraps(func)
            def inner(*args, **kwargs):
                if clear_output:
                    self.clear_output(*clear_args, **clear_kwargs)
                with self:
                    return func(*args, **kwargs)
            return inner
        return capture_decorator

    def __enter__(self):
        """Called upon entering output widget context manager."""
        self._flush()
        ip = get_ipython()
        # Capture is routed by message id: record the current execution's
        # parent message id so the frontend attributes output to this widget.
        if ip and hasattr(ip, 'kernel') and hasattr(ip.kernel, '_parent_header'):
            self.msg_id = ip.kernel._parent_header['header']['msg_id']

    def __exit__(self, etype, evalue, tb):
        """Called upon exiting output widget context manager."""
        ip = get_ipython()
        if etype is not None:
            # Render the traceback inside the widget rather than letting it
            # propagate to the caller's output area.
            ip.showtraceback((etype, evalue, tb), tb_offset=0)
        self._flush()
        self.msg_id = ''
        # suppress exceptions, since they are shown above
        return True

    def _flush(self):
        """Flush stdout and stderr buffers."""
        sys.stdout.flush()
        sys.stderr.flush()

    def _append_stream_output(self, text, stream_name):
        """Append a stream output."""
        # Tuple reassignment (rather than in-place append) is what triggers
        # the traitlets change notification that syncs to the frontend.
        self.outputs += (
            {'output_type': 'stream', 'name': stream_name, 'text': text},
        )

    def append_stdout(self, text):
        """Append text to the stdout stream."""
        self._append_stream_output(text, stream_name='stdout')

    def append_stderr(self, text):
        """Append text to the stderr stream."""
        self._append_stream_output(text, stream_name='stderr')

    def append_display_data(self, display_object):
        """Append a display object as an output.

        Parameters
        ----------
        display_object : IPython.core.display.DisplayObject
            The object to display (e.g., an instance of
            `IPython.display.Markdown` or `IPython.display.Image`).
        """
        fmt = InteractiveShell.instance().display_formatter.format
        data, metadata = fmt(display_object)
        self.outputs += (
            {
                'output_type': 'display_data',
                'data': data,
                'metadata': metadata
            },
        )
class PredictionApplication(Application):
    """Command-line application that predicts properties for atomic
    structures using a previously trained HDNNP model."""

    name = Unicode(u'hdnnpy predict')
    description = ('Predict properties for atomic structures using trained'
                   ' HDNNP.')

    # Verbose flag, settable via config file or --verbose/-v flags below.
    verbose = Bool(
        False,
        help='Set verbose mode'
        ).tag(config=True)

    # Configurable classes exposed in generated config files / help.
    classes = List([PredictionConfig])

    # Default config file loaded by initialize().
    config_file = Path(
        'prediction_config.py',
        help='Load this config file')

    aliases = Dict({
        'log_level': 'Application.log_level',
        })

    # Command-line flags: --verbose / -v enable verbose mode,
    # --debug lowers the log level to DEBUG (10).
    flags = Dict({
        'verbose': ({
            'PredictionApplication': {
                'verbose': True,
                },
            }, 'Set verbose mode'),
        'v': ({
            'PredictionApplication': {
                'verbose': True,
                },
            }, 'Set verbose mode'),
        'debug': ({
            'Application': {
                'log_level': 10,
                },
            }, 'Set log level to DEBUG'),
        })

    def __init__(self, **kwargs):
        # Config objects are populated lazily in initialize().
        super().__init__(**kwargs)
        self.dataset_config = None
        self.model_config = None
        self.prediction_config = None

    def initialize(self, argv=None):
        """Parse the command line and config file, then restore the dataset
        and model configuration saved alongside the trained model."""
        self.parse_command_line(argv)
        self.load_config_file(self.config_file)
        self.prediction_config = PredictionConfig(config=self.config)

        # Register a constructor so Path objects stored in the training
        # result YAML round-trip correctly.
        yaml.add_constructor('Path', pyyaml_path_constructor)
        # NOTE(security): yaml.load without an explicit Loader uses the full
        # (unsafe) loader on older PyYAML versions; training_result.yaml is
        # produced locally by training, but a SafeLoader-based setup would be
        # preferable — verify before accepting files from untrusted sources.
        training_result = yaml.load(
            (self.prediction_config.load_dir / 'training_result.yaml').open())
        self.dataset_config = DatasetConfig(**training_result['dataset'])
        self.model_config = ModelConfig(**training_result['model'])

    def start(self):
        """Run the full prediction pipeline: parse structures, build
        datasets, predict (on MPI rank 0 only) and dump the results."""
        pc = self.prediction_config
        # Keep a copy of the config used for this prediction run.
        shutil.copy(self.config_file, pc.load_dir / self.config_file.name)
        tag_xyz_map, pc.elements = parse_xyz(
            pc.data_file, save=False, verbose=self.verbose)
        datasets = self.construct_datasets(tag_xyz_map)
        datasets = DatasetGenerator(*datasets).all()
        # Only the MPI root process produces and writes results.
        if MPI.rank == 0:
            results = self.predict(datasets)
            self.dump_result(results)

    def construct_datasets(self, tag_xyz_map):
        """Build one HDNNPDataset per structure tag matching the configured
        tag patterns, applying the preprocessors saved during training.

        Also updates dataset/model config counters (n_sample, n_input,
        n_output) as a side effect.
        """
        dc = self.dataset_config
        mc = self.model_config
        pc = self.prediction_config

        # Load the preprocessors (e.g. scalers) fitted during training.
        preprocesses = []
        for (name, args, kwargs) in dc.preprocesses:
            preprocess = PREPROCESS[name](*args, **kwargs)
            preprocess.load(
                pc.load_dir / 'preprocess' / f'{name}.npz',
                verbose=self.verbose)
            preprocesses.append(preprocess)

        datasets = []
        for pattern in pc.tags:
            # fnmatch supports shell-style wildcards in tag patterns.
            for tag in fnmatch.filter(tag_xyz_map, pattern):
                if self.verbose:
                    pprint(f'Construct sub dataset tagged as "{tag}"')
                tagged_xyz = tag_xyz_map.pop(tag)
                structures = AtomicStructure.read_xyz(tagged_xyz)

                # prepare descriptor dataset
                descriptor = DESCRIPTOR_DATASET[dc.descriptor](
                    pc.order, structures, **dc.parameters)
                descriptor.make(verbose=self.verbose)

                # prepare empty property dataset
                property_ = PROPERTY_DATASET[dc.property_](
                    pc.order, structures)

                # construct test dataset from descriptor & property datasets
                dataset = HDNNPDataset(descriptor, property_)
                dataset.construct(
                    all_elements=pc.elements,
                    preprocesses=preprocesses,
                    shuffle=False,
                    verbose=self.verbose)
                datasets.append(dataset)
                dc.n_sample += dataset.total_size
                mc.n_input = dataset.n_input
                mc.n_output = dataset.n_label

        return datasets

    def predict(self, datasets):
        """Run the trained HDNNP over each dataset.

        Returns a list of dicts, one per dataset, with a 'tag' entry plus
        one entry per predicted property (scaled by its coefficient).
        """
        mc = self.model_config
        pc = self.prediction_config
        results = []

        # master model: holds the per-element weights shared by all HDNNPs
        master_nnp = MasterNNP(
            pc.elements, mc.n_input, mc.hidden_layers, mc.n_output)
        chainer.serializers.load_npz(
            pc.load_dir / 'master_nnp.npz', master_nnp)

        for dataset in datasets:
            # hdnnp model: composed per-structure, parameters synced from master
            hdnnp = HighDimensionalNNP(
                dataset.elemental_composition,
                mc.n_input, mc.hidden_layers, mc.n_output)
            hdnnp.sync_param_with(master_nnp)

            batch = chainer.dataset.concat_examples(dataset)
            # One input array per derivative order (0..pc.order).
            inputs = [batch[f'inputs/{i}'] for i in range(pc.order + 1)]

            # Inference only: disable training mode and autograd graph.
            with chainer.using_config('train', False), \
                    chainer.using_config('enable_backprop', False):
                predictions = hdnnp.predict(inputs, pc.order)
            result = {
                **{'tag': dataset.tag},
                **{property_: coefficient * prediction.data
                   for property_, coefficient, prediction
                   in zip(dataset.property.properties,
                          dataset.property.coefficients,
                          predictions)},
                }
            results.append(result)
        return results

    def dump_result(self, results):
        """Write prediction results to disk.

        NOTE(review): only the '.npz' dump format is handled here; other
        values of pc.dump_format silently produce no output file — confirm
        whether additional formats are handled elsewhere.
        """
        pc = self.prediction_config
        result_file = pc.load_dir / f'prediction_result{pc.dump_format}'
        if pc.dump_format == '.npz':
            kv_result = {}
            for result in results:
                tag = result.pop('tag')
                # Flatten to 'tag/property' keys for np.savez.
                kv_result.update({tag + '/' + key: value
                                  for key, value in result.items()})
            np.savez(result_file, **kv_result)
class Tab(SelectionContainer):
    """A tabbed container widget.

    Thin wrapper over ``SelectionContainer`` that only pins the frontend
    view/model names; construction is delegated entirely to the parent.
    """

    _view_name = Unicode('TabView').tag(sync=True)
    _model_name = Unicode('TabModel').tag(sync=True)

    def __init__(self, childrens, labels):
        # Forward both arguments unchanged to SelectionContainer.
        super().__init__(childrens, labels)
class Widget(LoggingHasTraits):
    """Base class for all Jupyter interactive widgets.

    A Widget is backed by a comm to the frontend; every trait tagged with
    ``sync=True`` is kept in sync with the frontend model over that comm.
    """
    #-------------------------------------------------------------------------
    # Class attributes
    #-------------------------------------------------------------------------
    # Optional hook invoked whenever any widget is constructed;
    # set via on_widget_constructed().
    _widget_construction_callback = None

    # _active_widgets is a dictionary of all active widget objects
    _active_widgets = {}

    # _widget_types is a registry of widgets by module, version, and name:
    _widget_types = WidgetRegistry()

    @classmethod
    def close_all(cls):
        """Close every currently active widget (closes their comms)."""
        # Iterate over a copy: widget.close() mutates _active_widgets.
        for widget in list(cls._active_widgets.values()):
            widget.close()

    @staticmethod
    def on_widget_constructed(callback):
        """Registers a callback to be called when a widget is constructed.

        The callback must have the following signature:
        callback(widget)"""
        Widget._widget_construction_callback = callback

    @staticmethod
    def _call_widget_constructed(widget):
        """Static method, called when a widget is constructed."""
        if Widget._widget_construction_callback is not None and callable(
                Widget._widget_construction_callback):
            Widget._widget_construction_callback(widget)

    @staticmethod
    def handle_comm_opened(comm, msg):
        """Static method, called when a widget is constructed."""
        # Reject frontends speaking a different major protocol version.
        version = msg.get('metadata', {}).get('version', '')
        if version.split('.')[0] != PROTOCOL_VERSION_MAJOR:
            raise ValueError(
                "Incompatible widget protocol versions: received version %r, expected version %r"
                % (version, __protocol_version__))
        data = msg['content']['data']
        state = data['state']

        # Find the widget class to instantiate in the registered widgets
        widget_class = Widget._widget_types.get(state['_model_module'],
                                                state['_model_module_version'],
                                                state['_model_name'],
                                                state['_view_module'],
                                                state['_view_module_version'],
                                                state['_view_name'])
        widget = widget_class(comm=comm)
        # Re-attach binary buffers into the state dict before applying it.
        if 'buffer_paths' in data:
            _put_buffers(state, data['buffer_paths'], msg['buffers'])
        widget.set_state(state)

    @staticmethod
    def get_manager_state(drop_defaults=False, widgets=None):
        """Returns the full state for a widget manager for embedding

        :param drop_defaults: when True, it will not include default value
        :param widgets: list with widgets to include in the state
            (or all widgets when None)
        :return:
        """
        state = {}
        if widgets is None:
            widgets = Widget._active_widgets.values()
        for widget in widgets:
            state[widget.model_id] = widget._get_embed_state(
                drop_defaults=drop_defaults)
        return {'version_major': 2, 'version_minor': 0, 'state': state}

    def _get_embed_state(self, drop_defaults=False):
        """Return this widget's state in the embed format (model info,
        serialized state, and base64-encoded binary buffers)."""
        state = {
            'model_name': self._model_name,
            'model_module': self._model_module,
            'model_module_version': self._model_module_version
        }
        model_state, buffer_paths, buffers = _remove_buffers(
            self.get_state(drop_defaults=drop_defaults))
        state['state'] = model_state
        if len(buffers) > 0:
            # Buffers must be base64-encoded for JSON-safe embedding.
            state['buffers'] = [{
                'encoding': 'base64',
                'path': p,
                'data': standard_b64encode(d).decode('ascii')
            } for p, d in zip(buffer_paths, buffers)]
        return state

    def get_view_spec(self):
        """Return the view specification dict referencing this model."""
        return dict(version_major=2, version_minor=0, model_id=self._model_id)

    #-------------------------------------------------------------------------
    # Traits
    #-------------------------------------------------------------------------
    _model_name = Unicode('WidgetModel',
        help="Name of the model.", read_only=True).tag(sync=True)
    _model_module = Unicode('@jupyter-widgets/base',
        help="The namespace for the model.", read_only=True).tag(sync=True)
    _model_module_version = Unicode(
        __jupyter_widgets_base_version__,
        help="A semver requirement for namespace version containing the model.",
        read_only=True).tag(sync=True)
    _view_name = Unicode(None, allow_none=True,
        help="Name of the view.").tag(sync=True)
    _view_module = Unicode(None, allow_none=True,
        help="The namespace for the view.").tag(sync=True)
    _view_module_version = Unicode(
        '',
        help="A semver requirement for the namespace version containing the view."
    ).tag(sync=True)
    _view_count = Int(
        None,
        allow_none=True,
        help="EXPERIMENTAL: The number of views of the model displayed in the frontend. This attribute is experimental and may change or be removed in the future. None signifies that views will not be tracked. Set this to 0 to start tracking view creation/deletion."
    ).tag(sync=True)
    comm = Instance('ipykernel.comm.Comm', allow_none=True)

    keys = List(help="The traits which are synced.")

    @default('keys')
    def _default_keys(self):
        # All traits tagged sync=True are synced with the frontend.
        return [name for name in self.traits(sync=True)]

    # Values the frontend just sent us, held while notifications fire so we
    # don't echo them straight back (see _lock_property/_should_send_property).
    _property_lock = Dict()
    _holding_sync = False
    _states_to_send = Set()
    _msg_callbacks = Instance(CallbackDispatcher, ())

    #-------------------------------------------------------------------------
    # (Con/de)structor
    #-------------------------------------------------------------------------
    def __init__(self, **kwargs):
        """Public constructor"""
        self._model_id = kwargs.pop('model_id', None)
        super().__init__(**kwargs)

        Widget._call_widget_constructed(self)
        self.open()

    def __del__(self):
        """Object disposal"""
        self.close()

    #-------------------------------------------------------------------------
    # Properties
    #-------------------------------------------------------------------------

    def open(self):
        """Open a comm to the frontend if one isn't already open."""
        if self.comm is None:
            # Split binary buffers out of the JSON-serializable state.
            state, buffer_paths, buffers = _remove_buffers(self.get_state())

            args = dict(target_name='jupyter.widget',
                        data={'state': state, 'buffer_paths': buffer_paths},
                        buffers=buffers,
                        metadata={'version': __protocol_version__}
                        )
            if self._model_id is not None:
                args['comm_id'] = self._model_id

            self.comm = Comm(**args)

    @observe('comm')
    def _comm_changed(self, change):
        """Called when the comm is changed."""
        if change['new'] is None:
            return
        self._model_id = self.model_id

        self.comm.on_msg(self._handle_msg)
        Widget._active_widgets[self.model_id] = self

    @property
    def model_id(self):
        """Gets the model id of this widget.

        If a Comm doesn't exist yet, a Comm will be created automagically."""
        return self.comm.comm_id

    #-------------------------------------------------------------------------
    # Methods
    #-------------------------------------------------------------------------

    def close(self):
        """Close method.

        Closes the underlying comm.
        When the comm is closed, all of the widget views are automatically
        removed from the front-end."""
        if self.comm is not None:
            Widget._active_widgets.pop(self.model_id, None)
            self.comm.close()
            self.comm = None
            self._repr_mimebundle_ = None

    def send_state(self, key=None):
        """Sends the widget state, or a piece of it, to the front-end, if it exists.

        Parameters
        ----------
        key : unicode, or iterable (optional)
            A single property's name or iterable of property names to sync with the front-end.
        """
        state = self.get_state(key=key)
        if len(state) > 0:
            if self._property_lock:  # we need to keep this dict up to date with the front-end values
                for name, value in state.items():
                    if name in self._property_lock:
                        self._property_lock[name] = value
            state, buffer_paths, buffers = _remove_buffers(state)
            msg = {'method': 'update', 'state': state, 'buffer_paths': buffer_paths}
            self._send(msg, buffers=buffers)

    def get_state(self, key=None, drop_defaults=False):
        """Gets the widget state, or a piece of it.

        Parameters
        ----------
        key : unicode or iterable (optional)
            A single property's name or iterable of property names to get.

        Returns
        -------
        state : dict of states
        metadata : dict
            metadata for each field: {key: metadata}
        """
        if key is None:
            keys = self.keys
        elif isinstance(key, str):
            keys = [key]
        elif isinstance(key, Iterable):
            keys = key
        else:
            raise ValueError("key must be a string, an iterable of keys, or None")
        state = {}
        traits = self.traits()
        for k in keys:
            # Use a per-trait serializer when declared; identity otherwise.
            to_json = self.trait_metadata(k, 'to_json', self._trait_to_json)
            value = to_json(getattr(self, k), self)
            if not drop_defaults or not self._compare(value, traits[k].default_value):
                state[k] = value
        return state

    def _is_numpy(self, x):
        # Duck-typed check that avoids importing numpy unless needed.
        return x.__class__.__name__ == 'ndarray' and x.__class__.__module__ == 'numpy'

    def _compare(self, a, b):
        # numpy arrays raise/misbehave under plain ==, so special-case them.
        if self._is_numpy(a) or self._is_numpy(b):
            import numpy as np
            return np.array_equal(a, b)
        else:
            return a == b

    def set_state(self, sync_data):
        """Called when a state is received from the front-end."""
        # The order of these context managers is important. Properties must
        # be locked when the hold_trait_notification context manager is
        # released and notifications are fired.
        with self._lock_property(**sync_data), self.hold_trait_notifications():
            for name in sync_data:
                if name in self.keys:
                    from_json = self.trait_metadata(name, 'from_json',
                                                    self._trait_from_json)
                    self.set_trait(name, from_json(sync_data[name], self))

    def send(self, content, buffers=None):
        """Sends a custom msg to the widget model in the front-end.

        Parameters
        ----------
        content : dict
            Content of the message to send.
        buffers : list of binary buffers
            Binary buffers to send with message
        """
        self._send({"method": "custom", "content": content}, buffers=buffers)

    def on_msg(self, callback, remove=False):
        """(Un)Register a custom msg receive callback.

        Parameters
        ----------
        callback: callable
            callback will be passed three arguments when a message arrives::

                callback(widget, content, buffers)

        remove: bool
            True if the callback should be unregistered."""
        self._msg_callbacks.register_callback(callback, remove=remove)

    def add_traits(self, **traits):
        """Dynamically add trait attributes to the Widget."""
        super().add_traits(**traits)
        for name, trait in traits.items():
            if trait.get_metadata('sync'):
                self.keys.append(name)
                self.send_state(name)

    def notify_change(self, change):
        """Called when a property has changed."""
        # Send the state to the frontend before the user-registered callbacks
        # are called.
        name = change['name']
        if self.comm is not None and self.comm.kernel is not None:
            # Make sure this isn't information that the front-end just sent us.
            if name in self.keys and self._should_send_property(name, getattr(self, name)):
                # Send new state to front-end
                self.send_state(key=name)
        super().notify_change(change)

    def __repr__(self):
        return self._gen_repr_from_keys(self._repr_keys())

    #-------------------------------------------------------------------------
    # Support methods
    #-------------------------------------------------------------------------

    @contextmanager
    def _lock_property(self, **properties):
        """Lock a property-value pair.

        The value should be the JSON state of the property.

        NOTE: This, in addition to the single lock for all state changes, is
        flawed. In the future we may want to look into buffering state changes
        back to the front-end."""
        self._property_lock = properties
        try:
            yield
        finally:
            self._property_lock = {}

    @contextmanager
    def hold_sync(self):
        """Hold syncing any state until the outermost context manager exits"""
        if self._holding_sync is True:
            # Already holding (nested use): don't flush on inner exit.
            yield
        else:
            try:
                self._holding_sync = True
                yield
            finally:
                self._holding_sync = False
                self.send_state(self._states_to_send)
                self._states_to_send.clear()

    def _should_send_property(self, key, value):
        """Check the property lock (property_lock)"""
        to_json = self.trait_metadata(key, 'to_json', self._trait_to_json)
        if key in self._property_lock:
            # model_state, buffer_paths, buffers
            split_value = _remove_buffers({ key: to_json(value, self)})
            split_lock = _remove_buffers({ key: self._property_lock[key]})
            # A roundtrip conversion through json in the comparison takes care of
            # idiosyncracies of how python data structures map to json, for example
            # tuples get converted to lists.
            if (jsonloads(jsondumps(split_value[0])) == split_lock[0]
                and split_value[1] == split_lock[1]
                and _buffer_list_equal(split_value[2], split_lock[2])):
                return False
        if self._holding_sync:
            # Defer the send until hold_sync exits.
            self._states_to_send.add(key)
            return False
        else:
            return True

    # Event handlers
    @_show_traceback
    def _handle_msg(self, msg):
        """Called when a msg is received from the front-end"""
        data = msg['content']['data']
        method = data['method']

        if method == 'update':
            if 'state' in data:
                state = data['state']
                if 'buffer_paths' in data:
                    _put_buffers(state, data['buffer_paths'], msg['buffers'])
                self.set_state(state)

        # Handle a state request.
        elif method == 'request_state':
            self.send_state()

        # Handle a custom msg from the front-end.
        elif method == 'custom':
            if 'content' in data:
                self._handle_custom_msg(data['content'], msg['buffers'])

        # Catch remainder.
        else:
            self.log.error(
                'Unknown front-end to back-end widget msg with method "%s"' % method)

    def _handle_custom_msg(self, content, buffers):
        """Called when a custom msg is received."""
        self._msg_callbacks(self, content, buffers)

    @staticmethod
    def _trait_to_json(x, self):
        """Convert a trait value to json."""
        return x

    @staticmethod
    def _trait_from_json(x, self):
        """Convert json values to objects."""
        return x

    def _repr_mimebundle_(self, **kwargs):
        """Return the mimebundle (plain text + widget view spec) used by
        display machinery to render this widget."""
        plaintext = repr(self)
        if len(plaintext) > 110:
            plaintext = plaintext[:110] + '…'
        data = {
            'text/plain': plaintext,
        }
        if self._view_name is not None:
            # The 'application/vnd.jupyter.widget-view+json' mimetype has not been registered yet.
            # See the registration process and naming convention at
            # http://tools.ietf.org/html/rfc6838
            # and the currently registered mimetypes at
            # http://www.iana.org/assignments/media-types/media-types.xhtml.
            data['application/vnd.jupyter.widget-view+json'] = {
                'version_major': 2,
                'version_minor': 0,
                'model_id': self._model_id
            }
        return data

    def _send(self, msg, buffers=None):
        """Sends a message to the model in the front-end."""
        if self.comm is not None and self.comm.kernel is not None:
            self.comm.send(data=msg, buffers=buffers)

    def _repr_keys(self):
        """Yield the trait names worth showing in repr (non-private,
        non-default)."""
        traits = self.traits()
        for key in sorted(self.keys):
            # Exclude traits that start with an underscore
            if key[0] == '_':
                continue
            # Exclude traits who are equal to their default value
            value = getattr(self, key)
            trait = traits[key]
            if self._compare(value, trait.default_value):
                continue
            elif (isinstance(trait, (Container, Dict)) and
                  trait.default_value == Undefined and
                  (value is None or len(value) == 0)):
                # Empty container, and dynamic default will be empty
                continue
            yield key

    def _gen_repr_from_keys(self, keys):
        """Build 'ClassName(key=value, ...)' from the given trait names."""
        class_name = self.__class__.__name__
        signature = ', '.join(
            '{}={!r}'.format(key, getattr(self, key))
            for key in keys
        )
        return '{}({})'.format(class_name, signature)
class OAuthenticator(Authenticator):
    """Base class for OAuthenticators

    Subclasses must override:

    login_service (string identifying the service provider)
    login_handler (likely a subclass of OAuthLoginHandler)
    authenticate (method takes one arg - the request handler handling the
    oauth callback)
    """

    scope = List(Unicode(), config=True,
        help="""The OAuth scopes to request.
        See the OAuth documentation of your OAuth provider for options.
        For GitHub in particular, you can see github_scopes.md in this repo.
        """)

    # Display name of the provider; subclasses must override.
    login_service = 'override in subclass'

    oauth_callback_url = Unicode(
        os.getenv('OAUTH_CALLBACK_URL', ''),
        config=True,
        help="""Callback URL to use.
        Typically `https://{host}/hub/oauth_callback`""")

    # Name of the provider-specific env var holding the client id;
    # subclasses may set this so config can come from the environment.
    client_id_env = ''
    client_id = Unicode(config=True)

    def _client_id_default(self):
        # Dynamic default: provider-specific env var first, then the
        # generic OAUTH_CLIENT_ID.
        if self.client_id_env:
            client_id = os.getenv(self.client_id_env, '')
            if client_id:
                return client_id
        return os.getenv('OAUTH_CLIENT_ID', '')

    # Same pattern as client_id_env, for the client secret.
    client_secret_env = ''
    client_secret = Unicode(config=True)

    def _client_secret_default(self):
        if self.client_secret_env:
            client_secret = os.getenv(self.client_secret_env, '')
            if client_secret:
                return client_secret
        return os.getenv('OAUTH_CLIENT_SECRET', '')

    # TLS verification of the OAuth server; only the exact string '0'
    # in the env var disables it.
    validate_server_cert_env = 'OAUTH_TLS_VERIFY'
    validate_server_cert = Bool(config=True)

    def _validate_server_cert_default(self):
        env_value = os.getenv(self.validate_server_cert_env, '')
        if env_value == '0':
            return False
        else:
            return True

    def login_url(self, base_url):
        """Return the URL of the login entry point under ``base_url``."""
        return url_path_join(base_url, 'oauth_login')

    # Subclasses must replace this placeholder with a handler class.
    login_handler = "Specify login handler class in subclass"
    callback_handler = OAuthCallbackHandler

    def get_callback_url(self, handler=None):
        """Get my OAuth redirect URL

        Either from config or guess based on the current request.
        """
        if self.oauth_callback_url:
            return self.oauth_callback_url
        elif handler:
            return guess_callback_uri(handler.request.protocol,
                                      handler.request.host,
                                      handler.hub.server.base_url)
        else:
            raise ValueError(
                "Specify callback oauth_callback_url or give me a handler to guess with"
            )

    def get_handlers(self, app):
        """Return the (url pattern, handler) pairs this authenticator adds."""
        return [
            (r'/oauth_login', self.login_handler),
            (r'/oauth_callback', self.callback_handler),
        ]

    @gen.coroutine
    def authenticate(self, handler, data=None):
        # Must be implemented by subclasses: resolve the OAuth callback
        # into an authenticated username (or user dict).
        raise NotImplementedError()
# BUG FIX: the class previously listed itself (`JupyterQtConsoleApp`) among
# its own base classes, which raises NameError at class-definition time —
# the name is not bound until the class statement completes.
class JupyterQtConsoleApp(JupyterApp, JupyterConsoleApp):
    """Qt-based Jupyter console application.

    Launches a console-style GUI using Qt; manages the Qt application,
    windows, kernels and frontend widgets.
    """
    name = 'jupyter-qtconsole'

    description = """
        The Jupyter QtConsole.

        This launches a Console-style application using Qt. It is not a full
        console, in that launched terminal subprocesses will not be able to
        accept input.
    """
    examples = _examples

    classes = [IPythonWidget] + JupyterConsoleApp.classes
    flags = Dict(flags)
    aliases = Dict(aliases)
    frontend_flags = Any(qt_flags)
    frontend_aliases = Any(qt_aliases)
    kernel_client_class = QtKernelClient
    kernel_manager_class = QtKernelManager

    stylesheet = Unicode('', config=True,
        help="path to a custom CSS stylesheet")

    hide_menubar = CBool(False, config=True,
        help="Start the console window with the menu bar hidden.")

    maximize = CBool(False, config=True,
        help="Start the console window maximized.")

    plain = CBool(False, config=True,
        help="Use a plaintext widget instead of rich text (plain can't print/save).")

    display_banner = CBool(True, config=True,
        help="Whether to display a banner upon starting the QtConsole.")

    def _plain_changed(self, name, old, new):
        """Trait-change handler: switch widget kind/factory when `plain` flips."""
        kind = 'plain' if new else 'rich'
        self.config.ConsoleWidget.kind = kind
        if new:
            self.widget_factory = IPythonWidget
        else:
            self.widget_factory = RichIPythonWidget

    # the factory for creating a widget
    widget_factory = Any(RichIPythonWidget)

    def parse_command_line(self, argv=None):
        super(JupyterQtConsoleApp, self).parse_command_line(argv)
        self.build_kernel_argv(self.extra_args)

    def new_frontend_master(self):
        """ Create and return new frontend attached to new kernel, launched on localhost.
        """
        kernel_manager = self.kernel_manager_class(
            connection_file=self._new_connection_file(),
            parent=self,
            autorestart=True,
        )
        # start the kernel
        kwargs = {}
        # FIXME: remove special treatment of IPython kernels
        # NOTE(review): this reads the app-level self.kernel_manager, not the
        # freshly created local kernel_manager — presumably intentional
        # (checking the configured kernel type); verify.
        if self.kernel_manager.ipython_kernel:
            kwargs['extra_arguments'] = self.kernel_argv
        kernel_manager.start_kernel(**kwargs)
        kernel_manager.client_factory = self.kernel_client_class
        kernel_client = kernel_manager.client()
        kernel_client.start_channels(shell=True, iopub=True)
        widget = self.widget_factory(config=self.config, local_kernel=True)
        self.init_colors(widget)
        widget.kernel_manager = kernel_manager
        widget.kernel_client = kernel_client
        widget._existing = False
        widget._may_close = True
        widget._confirm_exit = self.confirm_exit
        widget._display_banner = self.display_banner
        return widget

    def new_frontend_slave(self, current_widget):
        """Create and return a new frontend attached to an existing kernel.

        Parameters
        ----------
        current_widget : IPythonWidget
            The IPythonWidget whose kernel this frontend is to share
        """
        kernel_client = self.kernel_client_class(
            connection_file=current_widget.kernel_client.connection_file,
            config=self.config,
        )
        kernel_client.load_connection_file()
        kernel_client.start_channels()
        widget = self.widget_factory(config=self.config, local_kernel=False)
        self.init_colors(widget)
        # Slave frontends share a kernel: never close it, never confirm exit.
        widget._existing = True
        widget._may_close = False
        widget._confirm_exit = False
        widget._display_banner = self.display_banner
        widget.kernel_client = kernel_client
        widget.kernel_manager = current_widget.kernel_manager
        return widget

    def init_qt_app(self):
        # separate from qt_elements, because it must run first
        self.app = QtGui.QApplication([])

    def init_qt_elements(self):
        """Create the widget, main window and wire them together."""
        # Create the widget.
        base_path = os.path.abspath(os.path.dirname(__file__))
        icon_path = os.path.join(base_path, 'resources', 'icon',
                                 'IPythonConsole.svg')
        self.app.icon = QtGui.QIcon(icon_path)
        QtGui.QApplication.setWindowIcon(self.app.icon)

        ip = self.ip
        local_kernel = (not self.existing) or is_local_ip(ip)
        self.widget = self.widget_factory(config=self.config,
                                          local_kernel=local_kernel)
        self.init_colors(self.widget)
        self.widget._existing = self.existing
        self.widget._may_close = not self.existing
        self.widget._confirm_exit = self.confirm_exit
        self.widget._display_banner = self.display_banner

        self.widget.kernel_manager = self.kernel_manager
        self.widget.kernel_client = self.kernel_client
        self.window = MainWindow(self.app,
                                 confirm_exit=self.confirm_exit,
                                 new_frontend_factory=self.new_frontend_master,
                                 slave_frontend_factory=self.new_frontend_slave,
                                 )
        self.window.log = self.log
        self.window.add_tab_with_frontend(self.widget)
        self.window.init_magic_helper()
        self.window.init_menu_bar()

        # Ignore on OSX, where there is always a menu bar
        if sys.platform != 'darwin' and self.hide_menubar:
            self.window.menuBar().setVisible(False)

        self.window.setWindowTitle('IPython')

    def init_colors(self, widget):
        """Configure the coloring of the widget"""
        # Note: This will be dramatically simplified when colors
        # are removed from the backend.

        # parse the colors arg down to current known labels
        cfg = self.config
        colors = cfg.ZMQInteractiveShell.colors if 'ZMQInteractiveShell.colors' in cfg else None
        style = cfg.IPythonWidget.syntax_style if 'IPythonWidget.syntax_style' in cfg else None
        sheet = cfg.IPythonWidget.style_sheet if 'IPythonWidget.style_sheet' in cfg else None

        # find the value for colors:
        if colors:
            colors = colors.lower()
            if colors in ('lightbg', 'light'):
                colors = 'lightbg'
            elif colors in ('dark', 'linux'):
                colors = 'linux'
            else:
                colors = 'nocolor'
        elif style:
            if style == 'bw':
                colors = 'nocolor'
            elif styles.dark_style(style):
                colors = 'linux'
            else:
                colors = 'lightbg'
        else:
            colors = None

        # Configure the style
        if style:
            widget.style_sheet = styles.sheet_from_template(style, colors)
            widget.syntax_style = style
            widget._syntax_style_changed()
            widget._style_sheet_changed()
        elif colors:
            # use a default dark/light/bw style
            widget.set_default_style(colors=colors)

        if self.stylesheet:
            # we got an explicit stylesheet
            if os.path.isfile(self.stylesheet):
                with open(self.stylesheet) as f:
                    sheet = f.read()
            else:
                raise IOError("Stylesheet %r not found." % self.stylesheet)
        if sheet:
            widget.style_sheet = sheet
            widget._style_sheet_changed()

    def init_signal(self):
        """allow clean shutdown on sigint"""
        signal.signal(signal.SIGINT, lambda sig, frame: self.exit(-2))
        # need a timer, so that QApplication doesn't block until a real
        # Qt event fires (can require mouse movement)
        # timer trick from http://stackoverflow.com/q/4938723/938949
        timer = QtCore.QTimer()
        # Let the interpreter run each 200 ms:
        timer.timeout.connect(lambda: None)
        timer.start(200)
        # hold onto ref, so the timer doesn't get cleaned up
        self._sigint_timer = timer

    @catch_config_error
    def initialize(self, argv=None):
        # Qt app must exist before any Qt elements are created.
        self.init_qt_app()
        super(JupyterQtConsoleApp, self).initialize(argv)
        JupyterConsoleApp.initialize(self, argv)
        self.init_qt_elements()
        self.init_signal()

    def start(self):
        # draw the window
        if self.maximize:
            self.window.showMaximized()
        else:
            self.window.show()
        self.window.raise_()

        # Start the application main loop.
        self.app.exec_()
class SlidesExporter(HTMLExporter):
    """Exports HTML slides with reveal.js"""

    # Overrides from HTMLExporter
    #################################

    export_from_notebook = "Reveal.js slides"

    @default('template_name')
    def _template_name_default(self):
        # Use the reveal template shipped with nbconvert.
        return 'reveal'

    @default('file_extension')
    def _file_extension_default(self):
        return '.slides.html'

    @default('template_extension')
    def _template_extension_default(self):
        return '.html.j2'

    # Extra resources
    #################################

    reveal_url_prefix = Unicode(
        help="""The URL prefix for reveal.js (version 3.x).
        This defaults to the reveal CDN, but can be any url pointing to a copy
        of reveal.js.

        For speaker notes to work, this must be a relative path to a local
        copy of reveal.js: e.g., "reveal.js".

        If a relative path is given, it must be a subdirectory of the
        current directory (from which the server is run).

        See the usage documentation
        (https://nbconvert.readthedocs.io/en/latest/usage.html#reveal-js-html-slideshow)
        for more details.
        """
    ).tag(config=True)

    @default('reveal_url_prefix')
    def _reveal_url_prefix_default(self):
        # Honor the deprecated RevealHelpPreprocessor option if present,
        # warning the user to migrate; otherwise fall back to the CDN.
        if 'RevealHelpPreprocessor.url_prefix' not in self.config:
            return 'https://unpkg.com/[email protected]'
        warn("Please update RevealHelpPreprocessor.url_prefix to "
             "SlidesExporter.reveal_url_prefix in config files.")
        return self.config.RevealHelpPreprocessor.url_prefix

    reveal_theme = Unicode('simple',
        help="""
        Name of the reveal.js theme to use.

        We look for a file with this name under
        ``reveal_url_prefix``/css/theme/``reveal_theme``.css.

        https://github.com/hakimel/reveal.js/tree/master/css/theme has
        list of themes that ship by default with reveal.js.
        """
    ).tag(config=True)

    reveal_transition = Unicode('slide',
        help="""
        Name of the reveal.js transition to use.

        The list of transitions that ships by default with reveal.js are:
        none, fade, slide, convex, concave and zoom.
        """
    ).tag(config=True)

    reveal_scroll = Bool(False,
        help="""
        If True, enable scrolling within each slide
        """
    ).tag(config=True)

    font_awesome_url = Unicode(
        "https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.7.0/css/font-awesome.css",
        help="""
        URL to load font awesome from.

        Defaults to loading from cdnjs.
        """
    ).tag(config=True)

    def _init_resources(self, resources):
        """Populate the 'reveal' section of the resources dict with the
        configured url prefix, theme, transition and scroll settings."""
        resources = super()._init_resources(resources)
        # setdefault creates the sub-dict only when it is missing,
        # equivalently to an explicit membership test.
        reveal = resources.setdefault('reveal', {})
        reveal['url_prefix'] = self.reveal_url_prefix
        reveal['theme'] = self.reveal_theme
        reveal['transition'] = self.reveal_transition
        reveal['scroll'] = self.reveal_scroll
        return resources
class Exchange(ABCExchange):
    """Filesystem-based implementation of the nbgrader exchange: a shared,
    world-writable directory through which assignments are released,
    fetched and submitted."""

    root = Unicode(
        "/srv/nbgrader/exchange",
        help="The nbgrader exchange directory writable to everyone. MUST be preexisting."
    ).tag(config=True)

    cache = Unicode(
        "",
        help="Local cache directory for nbgrader submit and nbgrader list. Defaults to $JUPYTER_DATA_DIR/nbgrader_cache"
    ).tag(config=True)

    @default("cache")
    def _cache_default(self):
        # Dynamic default under the user's Jupyter data directory.
        return os.path.join(jupyter_data_dir(), 'nbgrader_cache')

    path_includes_course = Bool(
        False,
        help=dedent(
            """
            Whether the path for fetching/submitting assignments should be
            prefixed with the course name. If this is `False`, then the path
            will be something like `./ps1`. If this is `True`, then the path
            will be something like `./course123/ps1`.
            """
        )
    ).tag(config=True)

    def set_perms(self, dest, fileperms, dirperms):
        """Recursively chmod ``dest``: files to ``fileperms``,
        directories to ``dirperms`` (applied bottom-up)."""
        all_dirs = []
        for dirname, _, filenames in os.walk(dest):
            for filename in filenames:
                os.chmod(os.path.join(dirname, filename), fileperms)
            all_dirs.append(dirname)

        # chmod directories deepest-first so permission changes on a parent
        # cannot block access to children still to be processed.
        for dirname in all_dirs[::-1]:
            os.chmod(dirname, dirperms)

    def ensure_root(self):
        """See if the exchange directory exists and is writable, fail if not."""
        if not check_directory(self.root, write=True, execute=True):
            self.fail(
                "Unwritable directory, please contact your instructor: {}".format(self.root))

    def init_src(self):
        """Compute and check the source paths for the transfer."""
        raise NotImplementedError

    def init_dest(self):
        """Compute and check the destination paths for the transfer."""
        raise NotImplementedError

    def copy_files(self):
        """Actually do the file transfer."""
        raise NotImplementedError

    def do_copy(self, src, dest, log=None):
        """
        Copy the src dir to the dest dir, omitting excluded
        file/directories, non included files, and too large files, as
        specified by the options coursedir.ignore, coursedir.include
        and coursedir.max_file_size.
        """
        shutil.copytree(src, dest,
                        ignore=ignore_patterns(exclude=self.coursedir.ignore,
                                               include=self.coursedir.include,
                                               max_file_size=self.coursedir.max_file_size,
                                               log=self.log))
        # copytree copies access mode too - so we must add go+rw back to it if
        # we are in groupshared.
        if self.coursedir.groupshared:
            for dirname, _, filenames in os.walk(dest):
                # dirs become ug+rwx
                st_mode = os.stat(dirname).st_mode
                if st_mode & 0o2770 != 0o2770:
                    try:
                        os.chmod(dirname, (st_mode | 0o2770) & 0o2777)
                    except PermissionError:
                        self.log.warning(
                            "Could not update permissions of %s to make it groupshared",
                            dirname)
            for filename in filenames:
                filename = os.path.join(dirname, filename)
                st_mode = os.stat(filename).st_mode
                if st_mode & 0o660 != 0o660:
                    try:
                        os.chmod(filename, (st_mode | 0o660) & 0o777)
                    except PermissionError:
                        self.log.warning(
                            "Could not update permissions of %s to make it groupshared",
                            filename)

    def start(self):
        """Validate the platform and exchange root, then run the transfer."""
        if sys.platform == 'win32':
            self.fail("Sorry, the exchange is not available on Windows.")
        if not self.coursedir.groupshared:
            # This just makes sure that directory is o+rwx. In group shared
            # case, it is up to admins to ensure that instructors can write
            # there.
            self.ensure_root()

        return super(Exchange, self).start()

    def _assignment_not_found(self, src_path, other_path):
        """Raise ExchangeError for a missing assignment, suggesting the
        closest fuzzy match found under ``other_path``."""
        msg = "Assignment not found at: {}".format(src_path)
        self.log.fatal(msg)
        found = glob.glob(other_path)
        if found:
            # Rank candidates by fuzzy string similarity to the missing path.
            scores = sorted([(fuzz.ratio(self.src_path, x), x) for x in found])
            self.log.error("Did you mean: %s", scores[-1][1])

        raise ExchangeError(msg)

    def ensure_directory(self, path, mode):
        """Ensure that the path exists, has the right mode and is self owned."""
        if not os.path.isdir(path):
            os.makedirs(path)
            # For some reason, Python won't create a directory with a mode of 0o733
            # so we have to create and then chmod.
            os.chmod(path, mode)
        else:
            if not self.coursedir.groupshared and not self_owned(path):
                self.fail("You don't own the directory: {}".format(path))
class Plot(widgets.DOMWidget):
    """
    Main K3D widget.

    Attributes:
        antialias: `int`:
            Enable antialiasing in WebGL renderer, changes have no effect after displaying.
        height: `int`:
            Height of the Widget in pixels, changes have no effect after displaying.
        background_color: `int`.
            Packed RGB color of the plot background (0xff0000 is red, 0xff is blue), -1 is for transparent.
        camera_auto_fit: `bool`.
            Enable automatic camera setting after adding, removing or changing a plot object.
        grid_auto_fit: `bool`.
            Enable automatic adjustment of the plot grid to contained objects.
        grid_visible: `bool`.
            Enable or disable grid.
        screenshot_scale: `Float`.
            Multipiler to screenshot resolution.
        voxel_paint_color: `int`.
            The (initial) int value to be inserted when editing voxels.
        lighting: `Float`.
            Lighting factor.
        grid: `array_like`.
            6-element tuple specifying the bounds of the plot grid (x0, y0, z0, x1, y1, z1).
        camera: `array_like`.
            9-element list or array specifying camera position.
        camera_no_rotate: `Bool`.
            Lock for camera rotation.
        camera_no_zoom: `Bool`.
            Lock for camera zoom.
        camera_no_pan: `Bool`.
            Lock for camera pan.
        camera_rotate_speed: `Float`.
            Speed of camera rotation.
        camera_zoom_speed: `Float`.
            Speed of camera zoom.
        camera_pan_speed: `Float`.
            Speed of camera pan.
        camera_fov: `Float`.
            Camera Field of View.
        snapshot_include_js: `Bool`.
            If it's true snapshot html is standalone.
        axes: `list`.
            Axes labels for plot.
        time: `list`.
            Time value (used in TimeSeries)
        name: `string`.
            Name of the plot. Used to filenames of snapshot/screenshot etc.
        mode: `str`.
            Mode of K3D viewer.

            Legal values are:

            :`view`: No interaction with objects,
            :`add`: On voxels objects adding mode,
            :`change`: On voxels objects edit mode,
            :`callback`: Handling click_callback and hover_callback on some type of objects,
            :`manipulate`: Enable object transform widget.
        camera_mode: `str`.
            Mode of camera movement.

            Legal values are:

            :`trackball`: orbit around point with dynamic up-vector of camera,
            :`orbit`: orbit around point with fixed up-vector of camera,
            :`fly`: orbit around point with dynamic up-vector of camera, mouse wheel also moves target point.
        manipulate_mode: `str`.
            Mode of manipulate widgets.

            Legal values are:

            :`translate`: Translation widget,
            :`rotate`: Rotation widget,
            :`scale`: Scaling widget.
        auto_rendering: `Bool`.
            State of auto rendering.
        fps: `Float`.
            Fps of animation.
        objects: `list`.
            List of `k3d.objects.Drawable` currently included in the plot, not to be changed directly.
    """

    _view_name = Unicode('PlotView').tag(sync=True)
    _model_name = Unicode('PlotModel').tag(sync=True)
    _view_module = Unicode('k3d').tag(sync=True)
    _model_module = Unicode('k3d').tag(sync=True)

    _view_module_version = Unicode(version).tag(sync=True)
    _model_module_version = Unicode(version).tag(sync=True)
    _backend_version = Unicode(version).tag(sync=True)

    # readonly (specified at creation)
    antialias = Int(min=0, max=5).tag(sync=True)
    height = Int().tag(sync=True)

    # readonly (not to be modified directly)
    object_ids = List().tag(sync=True)

    # read-write
    camera_auto_fit = Bool(True).tag(sync=True)
    auto_rendering = Bool(True).tag(sync=True)
    snapshot_include_js = Bool(True).tag(sync=True)
    lighting = Float().tag(sync=True)
    fps = Float().tag(sync=True)
    grid_auto_fit = Bool(True).tag(sync=True)
    grid_visible = Bool(True).tag(sync=True)
    fps_meter = Bool(True).tag(sync=True)
    menu_visibility = Bool(True).tag(sync=True)
    screenshot_scale = Float().tag(sync=True)
    time = Float().tag(sync=True)
    grid = ListOrArray((-1, -1, -1, 1, 1, 1), minlen=6, maxlen=6).tag(sync=True)
    background_color = Int().tag(sync=True)
    voxel_paint_color = Int().tag(sync=True)
    camera = ListOrArray(minlen=9, maxlen=9, empty_ok=True).tag(sync=True)
    camera_animation = TimeSeries(
        ListOrArray(minlen=9, maxlen=9, empty_ok=True)).tag(sync=True)
    camera_no_rotate = Bool(False).tag(sync=True)
    camera_no_zoom = Bool(False).tag(sync=True)
    camera_no_pan = Bool(False).tag(sync=True)
    camera_rotate_speed = Float().tag(sync=True)
    camera_zoom_speed = Float().tag(sync=True)
    camera_pan_speed = Float().tag(sync=True)
    clipping_planes = ListOrArray(empty_ok=True).tag(sync=True)
    colorbar_object_id = Int(-1).tag(sync=True)
    colorbar_scientific = Bool(False).tag(sync=True)
    rendering_steps = Int(1).tag(sync=True)
    # screenshot / snapshot are filled in by the JS side after a
    # fetch_screenshot() / fetch_snapshot() round-trip.
    screenshot = Unicode().tag(sync=True)
    snapshot = Unicode().tag(sync=True)
    camera_fov = Float().tag(sync=True)
    name = Unicode(default_value=None, allow_none=True).tag(sync=True)
    axes = List(minlen=3, maxlen=3, default_value=['x', 'y', 'z']).tag(sync=True)
    axes_helper = Float().tag(sync=True)
    mode = Unicode().tag(sync=True)
    camera_mode = Unicode().tag(sync=True)
    manipulate_mode = Unicode().tag(sync=True)

    # Class-level default; shadowed by a per-instance list in __init__.
    objects = []

    def __init__(self,
                 antialias=3,
                 background_color=0xFFFFFF,
                 camera_auto_fit=True,
                 grid_auto_fit=True,
                 grid_visible=True,
                 height=512,
                 voxel_paint_color=0,
                 grid=(-1, -1, -1, 1, 1, 1),
                 screenshot_scale=2.0,
                 lighting=1.5,
                 time=0.0,
                 fps_meter=False,
                 menu_visibility=True,
                 colorbar_object_id=-1,
                 rendering_steps=1,
                 axes=['x', 'y', 'z'],
                 camera_no_rotate=False,
                 camera_no_zoom=False,
                 snapshot_include_js=True,
                 camera_no_pan=False,
                 camera_rotate_speed=1.0,
                 camera_zoom_speed=1.2,
                 camera_pan_speed=0.3,
                 camera_fov=45.0,
                 axes_helper=1.0,
                 name=None,
                 mode='view',
                 camera_mode='trackball',
                 manipulate_mode='translate',
                 auto_rendering=True,
                 fps=25.0,
                 *args,
                 **kwargs):
        super(Plot, self).__init__()

        self.antialias = antialias
        self.camera_auto_fit = camera_auto_fit
        self.grid_auto_fit = grid_auto_fit
        self.fps_meter = fps_meter
        self.fps = fps
        self.grid = grid
        self.grid_visible = grid_visible
        self.background_color = background_color
        self.voxel_paint_color = voxel_paint_color
        self.screenshot_scale = screenshot_scale
        self.height = height
        self.lighting = lighting
        self.time = time
        self.menu_visibility = menu_visibility
        self.colorbar_object_id = colorbar_object_id
        self.rendering_steps = rendering_steps
        self.camera_no_rotate = camera_no_rotate
        self.camera_no_zoom = camera_no_zoom
        self.camera_no_pan = camera_no_pan
        self.camera_rotate_speed = camera_rotate_speed
        self.camera_zoom_speed = camera_zoom_speed
        self.camera_pan_speed = camera_pan_speed
        self.camera_fov = camera_fov
        self.axes = axes
        self.axes_helper = axes_helper
        self.name = name
        self.mode = mode
        self.snapshot_include_js = snapshot_include_js
        self.camera_mode = camera_mode
        self.manipulate_mode = manipulate_mode
        self.auto_rendering = auto_rendering
        # Default camera: position, target, then up vector.
        self.camera = [2, -3, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]

        self.object_ids = []
        self.objects = []
        # Output widgets this plot has been displayed in (see display/close).
        self.outputs = []

    def __iadd__(self, objs):
        """Add Drawable to plot."""
        # NOTE(review): iterating a single Drawable assumes Drawable
        # implements __iter__ (e.g. yielding itself or group members) --
        # confirm against k3d.objects.
        assert isinstance(objs, Drawable)

        for obj in objs:
            if obj.id not in self.object_ids:
                self.object_ids = self.object_ids + [obj.id]
                self.objects.append(obj)

        return self

    def __isub__(self, objs):
        """Remove Drawable from plot."""
        assert isinstance(objs, Drawable)

        for obj in objs:
            self.object_ids = [id_ for id_ in self.object_ids if id_ != obj.id]
            if obj in self.objects:
                self.objects.remove(obj)

        return self

    def display(self, **kwargs):
        """Show plot inside ipywidgets.Output()."""
        output = widgets.Output()

        with output:
            display(self, **kwargs)

        # Remember the Output so close() can clear it later.
        self.outputs.append(output)

        display(output)

    def render(self):
        """Trigger rendering on demand.

        Useful when self.auto_rendering == False."""
        self.send({'msg_type': 'render'})

    def start_auto_play(self):
        """Start animation of plot with objects using TimeSeries."""
        self.send({'msg_type': 'start_auto_play'})

    def stop_auto_play(self):
        """Stop animation of plot with objects using TimeSeries."""
        self.send({'msg_type': 'stop_auto_play'})

    def close(self):
        """Remove plot from all its ipywidgets.Output()-s."""
        for output in self.outputs:
            output.clear_output()

        self.outputs = []

    def camera_reset(self, factor=1.5):
        """Trigger auto-adjustment of camera.

        Useful when self.camera_auto_fit == False."""
        self.send({'msg_type': 'reset_camera', 'factor': factor})

    def get_auto_grid(self):
        """Compute a grid (x0, x1, y0, y1, z0, z1 flattened) that bounds
        all objects' bounding boxes."""
        # d[:, 0::2] are the per-object minima, d[:, 1::2] the maxima.
        d = np.stack([o.get_bounding_box() for o in self.objects])

        return np.dstack(
            [np.min(d[:, 0::2], axis=0),
             np.max(d[:, 1::2], axis=0)]).flatten()

    def get_auto_camera(self, factor=1.5, yaw=25, pitch=15):
        """Compute a 9-element camera (position, target, up) framing all
        objects, at the given yaw/pitch angles (degrees)."""
        bounds = self.get_auto_grid()
        center = (bounds[::2] + bounds[1::2]) / 2.0
        radius = 0.5 * np.sum(np.abs(bounds[::2] - bounds[1::2]) ** 2) ** 0.5
        # Distance so the bounding sphere fits the field of view.
        cam_distance = radius * factor / np.sin(
            np.deg2rad(self.camera_fov / 2.0))

        x = np.sin(np.deg2rad(pitch)) * np.cos(np.deg2rad(yaw))
        y = np.sin(np.deg2rad(pitch)) * np.sin(np.deg2rad(yaw))
        z = np.cos(np.deg2rad(pitch))

        if pitch not in [0, 180]:
            up = [0, 0, 1]
        else:
            # Camera looks straight along z; z-up would be degenerate.
            # NOTE(review): [0, 1, 1] looks like it was meant to be
            # [0, 1, 0] -- confirm against upstream before changing.
            up = [0, 1, 1]

        return [
            center[0] + x * cam_distance, center[1] + y * cam_distance,
            center[2] + z * cam_distance, center[0], center[1], center[2],
            up[0], up[1], up[2]
        ]

    def fetch_screenshot(self, only_canvas=False):
        """Request creating a PNG screenshot on the JS side and saving it in self.screenshot

        The result is a string of a PNG file in base64 encoding.
        This function requires a round-trip of websocket messages. The result will
        be available after the current cell finishes execution."""
        self.send({'msg_type': 'fetch_screenshot', 'only_canvas': only_canvas})

    def yield_screenshots(self, generator_function):
        """Decorator for a generator function receiving screenshots via yield."""
        @wraps(generator_function)
        def inner():
            generator = generator_function()

            def send_new_value(change):
                try:
                    generator.send(base64.b64decode(change.new))
                except StopIteration:
                    # Generator finished; stop feeding it screenshots.
                    self.unobserve(send_new_value, 'screenshot')

            self.observe(send_new_value, 'screenshot')
            # start the decorated generator
            generator.send(None)

        return inner

    def fetch_snapshot(self, compression_level=9):
        """Request creating a HTML snapshot on the JS side and saving it in self.snapshot

        The result is a string: a HTML document with this plot embedded.
        This function requires a round-trip of websocket messages. The result will
        be available after the current cell finishes execution."""
        self.send({
            'msg_type': 'fetch_snapshot',
            'compression_level': compression_level
        })

    def yield_snapshots(self, generator_function):
        """Decorator for a generator function receiving snapshots via yield."""
        @wraps(generator_function)
        def inner():
            generator = generator_function()

            def send_new_value(change):
                try:
                    generator.send(base64.b64decode(change.new))
                except StopIteration:
                    self.unobserve(send_new_value, 'snapshot')

            self.observe(send_new_value, 'snapshot')
            # start the decorated generator
            generator.send(None)

        return inner

    def get_binary_snapshot_objects(self):
        """Serialize all plot objects' synced traits to a msgpack blob."""
        import msgpack
        from .helpers import to_json

        snapshot = {"objects": [], "chunkList": []}

        for o in self.objects:
            obj = {}
            for k, v in o.traits().items():
                # Only traits tagged sync=True are part of the snapshot.
                if 'sync' in v.metadata:
                    obj[k] = to_json(k, o[k], o, o['compression_level'])

            snapshot['objects'].append(obj)

        return msgpack.packb(snapshot, use_bin_type=True)

    def get_snapshot_params(self):
        """Collect the plot-level settings embedded in an HTML snapshot."""
        return {
            "cameraAutoFit": self.camera_auto_fit,
            "menuVisibility": self.menu_visibility,
            "gridAutoFit": self.grid_auto_fit,
            "gridVisible": self.grid_visible,
            "grid": self.grid,
            "antialias": self.antialias,
            "screenshotScale": self.screenshot_scale,
            "clearColor": self.background_color,
            "clippingPlanes": self.clipping_planes,
            "lighting": self.lighting,
            "time": self.time,
            "fpsMeter": self.fps_meter,
            "cameraMode": self.camera_mode,
            "colorbarObjectId": self.colorbar_object_id,
            "axes": self.axes,
            "cameraNoRotate": self.camera_no_rotate,
            "cameraNoZoom": self.camera_no_zoom,
            "cameraNoPan": self.camera_no_pan,
            "cameraRotateSpeed": self.camera_rotate_speed,
            "cameraZoomSpeed": self.camera_zoom_speed,
            "cameraPanSpeed": self.camera_pan_speed,
            "name": self.name,
            "camera_fov": self.camera_fov,
            "axesHelper": self.axes_helper,
            "cameraAnimation": self.camera_animation,
            "fps": self.fps
        }

    def get_snapshot(self, compression_level=9, additional_js_code=''):
        """Produce on the Python side a HTML document with the current plot embedded."""
        import os
        import io
        import zlib

        dir_path = os.path.dirname(os.path.realpath(__file__))

        # Object data is zlib-compressed and base64-embedded in the template.
        data = self.get_binary_snapshot_objects()
        data = base64.b64encode(zlib.compress(data, compression_level))

        if self.snapshot_include_js:
            # Standalone snapshot: inline k3d, require.js and pako into the
            # template so the file works without network access.
            f = io.open(os.path.join(dir_path, 'static', 'snapshot_standalone.txt'),
                        mode="r",
                        encoding="utf-8")
            template = f.read()
            f.close()

            f = io.open(os.path.join(dir_path, 'static', 'standalone.js'),
                        mode="r",
                        encoding="utf-8")
            template = template.replace(
                '[K3D_SOURCE]',
                base64.b64encode(
                    zlib.compress(f.read().encode(),
                                  compression_level)).decode("utf-8"))
            f.close()

            f = io.open(os.path.join(dir_path, 'static', 'require.js'),
                        mode="r",
                        encoding="utf-8")
            template = template.replace('[REQUIRE_JS]', f.read())
            f.close()

            f = io.open(os.path.join(dir_path, 'static', 'pako_inflate.min.js'),
                        mode="r",
                        encoding="utf-8")
            template = template.replace('[PAKO_JS]', f.read())
            f.close()
        else:
            # Online snapshot: template references assets from a CDN.
            f = io.open(os.path.join(dir_path, 'static', 'snapshot_online.txt'),
                        mode="r",
                        encoding="utf-8")
            template = f.read()
            f.close()

            template = template.replace('[VERSION]', self._view_module_version)
            template = template.replace('[DATA]', data.decode("utf-8"))

        params = self.get_snapshot_params()

        template = template.replace('[PARAMS]', json.dumps(params))
        template = template.replace('[CAMERA]', str(self.camera))
        template = template.replace('[ADDITIONAL]', additional_js_code)

        return template
class YarnBackend(DBBackendBase):
    """A cluster backend for managing dask clusters on Hadoop/YARN."""

    cluster_config_class = Type(
        "dask_gateway_server.backends.yarn.YarnClusterConfig",
        klass="dask_gateway_server.backends.base.ClusterConfig",
        help="The cluster config class to use",
        config=True,
    )

    principal = Unicode(
        None,
        help="Kerberos principal for Dask Gateway user",
        allow_none=True,
        config=True,
    )

    keytab = Unicode(
        None,
        help="Path to kerberos keytab for Dask Gateway user",
        allow_none=True,
        config=True,
    )

    app_client_cache_max_size = Integer(
        10,
        help="""
        The max size of the cache for application clients.

        A larger cache will result in improved performance, but will also use
        more resources.
        """,
        config=True,
    )

    def async_apply(self, f, *args, **kwargs):
        """Run the blocking callable ``f(*args, **kwargs)`` in the default
        executor and return an awaitable for its result.

        The skein client API is synchronous, so every call into it goes
        through here to avoid blocking the event loop.
        """
        return get_running_loop().run_in_executor(None, lambda: f(*args, **kwargs))

    def _get_security(self, cluster):
        """Build a skein Security object from the cluster's TLS credentials."""
        return skein.Security(cert_bytes=cluster.tls_cert, key_bytes=cluster.tls_key)

    async def _get_app_client(self, cluster):
        """Return a (cached) skein ApplicationClient for the cluster's app."""
        out = self.app_client_cache.get(cluster.name)
        if out is None:
            app_id = cluster.state["app_id"]
            security = self._get_security(cluster)
            if cluster.name not in self.app_address_cache:
                # Lookup and cache the application address
                report = self.skein_client.application_report(app_id)
                if report.state != "RUNNING":  # pragma: nocover
                    raise ValueError("Application %s is not running" % app_id)
                app_address = "%s:%d" % (report.host, report.port)
                self.app_address_cache[cluster.name] = app_address
            app_address = self.app_address_cache[cluster.name]
            out = skein.ApplicationClient(app_address, app_id, security=security)
            self.app_client_cache.put(cluster.name, out)
        return out

    def get_worker_command(self, cluster):
        """Build the worker launch command; resources come from skein
        environment variables set in each container."""
        return [
            cluster.config.worker_cmd,
            "--nthreads",
            "$SKEIN_RESOURCE_VCORES",
            "--memory-limit",
            "${SKEIN_RESOURCE_MEMORY}MiB",
        ]

    def _build_specification(self, cluster, cert_path, key_path):
        """Build the skein ApplicationSpec for a cluster.

        The scheduler runs in the application master; workers run as a
        service with 0 initial instances (scaled up via add_container).
        """
        files = {
            k: skein.File.from_dict(v) if isinstance(v, dict) else v
            for k, v in cluster.config.localize_files.items()
        }

        files["dask.crt"] = cert_path
        files["dask.pem"] = key_path

        env = self.get_env(cluster)

        scheduler_cmd = " ".join(self.get_scheduler_command(cluster))
        worker_cmd = " ".join(self.get_worker_command(cluster))
        scheduler_script = f"{cluster.config.scheduler_setup}\n{scheduler_cmd}"
        worker_script = f"{cluster.config.worker_setup}\n{worker_cmd}"

        master = skein.Master(
            security=self._get_security(cluster),
            resources=skein.Resources(
                memory="%d b" % cluster.config.scheduler_memory,
                vcores=cluster.config.scheduler_cores,
            ),
            files=files,
            env=env,
            script=scheduler_script,
        )

        services = {
            "dask.worker": skein.Service(
                resources=skein.Resources(
                    memory="%d b" % cluster.config.worker_memory,
                    vcores=cluster.config.worker_cores,
                ),
                instances=0,
                max_restarts=0,
                allow_failures=True,
                files=files,
                env=env,
                script=worker_script,
            )
        }

        return skein.ApplicationSpec(
            name="dask-gateway",
            queue=cluster.config.queue,
            user=cluster.username,
            master=master,
            services=services,
        )

    # Killing the YARN application tears down scheduler and workers together.
    supports_bulk_shutdown = True

    async def do_setup(self):
        """Create the shared skein client and the per-cluster caches."""
        self.skein_client = await self.async_apply(
            skein.Client,
            principal=self.principal,
            keytab=self.keytab,
            security=skein.Security.new_credentials(),
        )

        self.app_client_cache = LRUCache(self.app_client_cache_max_size)
        self.app_address_cache = {}

    async def do_cleanup(self):
        self.skein_client.close()

    async def do_start_cluster(self, cluster):
        """Submit a new YARN application for the cluster.

        The TLS credentials are written to temporary files so skein can
        localize them into the containers.
        """
        with NamedTemporaryFile() as cert_fil, NamedTemporaryFile() as key_fil:
            cert_fil.write(cluster.tls_cert)
            cert_fil.file.flush()
            key_fil.write(cluster.tls_key)
            key_fil.file.flush()
            spec = self._build_specification(cluster, cert_fil.name, key_fil.name)

            app_id = await self.async_apply(self.skein_client.submit, spec)

        yield {"app_id": app_id}

    async def do_stop_cluster(self, cluster):
        app_id = cluster.state.get("app_id")
        # Nothing was ever started for this cluster.
        if app_id is None:
            return

        await self.async_apply(self.skein_client.kill_application, app_id)

        # Remove cluster from caches
        self.app_client_cache.discard(cluster.name)
        self.app_address_cache.pop(cluster.name, None)

    async def do_check_clusters(self, clusters):
        """Return a list with one bool per cluster: True if still running."""
        results = []
        for cluster in clusters:
            app_id = cluster.state.get("app_id")
            if app_id is None:
                # BUGFIX: previously `return False` here, which aborted the
                # check and returned a bare bool instead of the per-cluster
                # list the caller expects. Record this cluster as not running
                # and keep checking the rest.
                results.append(False)
                continue
            report = await self.async_apply(
                self.skein_client.application_report, app_id
            )
            ok = str(report.state) not in {"FAILED", "KILLED", "FINISHED"}
            results.append(ok)
        return results

    async def do_start_worker(self, worker):
        """Scale the dask.worker service up by one container."""
        app = await self._get_app_client(worker.cluster)
        container = await self.async_apply(
            app.add_container,
            "dask.worker",
            env={"DASK_GATEWAY_WORKER_NAME": worker.name},
        )
        yield {"container_id": container.id}

    async def do_stop_worker(self, worker):
        container_id = worker.state.get("container_id")
        if container_id is None:
            return
        app = await self._get_app_client(worker.cluster)
        try:
            await self.async_apply(app.kill_container, container_id)
        except ValueError:
            # Container already gone; treat as stopped.
            pass

    async def do_check_workers(self, workers):
        """Return a list with one bool per worker: True if its container is
        still active. Workers are grouped per cluster so each application is
        queried only once."""
        grouped = defaultdict(list)
        for w in workers:
            grouped[w.cluster].append(w)

        results = {}
        for cluster, workers in grouped.items():
            app = await self._get_app_client(cluster)
            try:
                containers = await self.async_apply(
                    app.get_containers, services=("dask.worker",)
                )
                active = {c.id for c in containers}
                results.update(
                    {w.name: w.state.get("container_id") in active for w in workers}
                )
            except Exception as exc:
                # Best-effort: if the application can't be reached, report
                # all its workers as down rather than failing the whole check.
                self.log.debug(
                    "Error getting worker statuses for cluster %s",
                    cluster.name,
                    exc_info=exc,
                )
                results.update({w.name: False for w in workers})
        return [results[w.name] for w in workers]
class ClingKernel(Kernel):
    """Cling Kernel for Jupyter

    Drives the cling C++ interpreter through libclingJupyter via ctypes,
    forwarding interpreter output to Jupyter IOPub messages.
    """
    implementation = 'cling_kernel'
    implementation_version = __version__
    language_version = 'X'

    banner = Unicode()

    def _banner_default(self):
        # BUGFIX: removed an unreachable `return self._banner` that followed
        # this return statement.
        return 'cling-%s' % self.language_version

    # codemirror_mode='clike' *should* work but doesn't, using the mimetype instead
    language_info = {'name': 'c++',
                     'codemirror_mode': 'text/x-c++src',
                     # BUGFIX: mimetype had a stray leading space
                     # (' text/x-c++src'), which is not a valid MIME type.
                     'mimetype': 'text/x-c++src',
                     'file_extension': '.c++'}

    # Interval (seconds) between flushes of captured stdout/stderr.
    flush_interval = Float(0.25, config=True)

    std = CaselessStrEnum(default_value='c++11',
                          values=['c++11', 'c++14'],
                          help="C++ standard to use, either c++14 or c++11").tag(config=True)

    def __init__(self, **kwargs):
        super(ClingKernel, self).__init__(**kwargs)
        try:
            # Resolve symlinks so we find the real cling installation prefix.
            whichCling = os.readlink(shutil.which('cling'))
        except (AttributeError, OSError):
            # AttributeError: Python 2 has no shutil.which.
            # OSError: cling is on PATH but is a regular file, not a symlink
            # (os.readlink raises EINVAL) -- fall back to the plain path.
            from distutils.spawn import find_executable
            whichCling = find_executable('cling')

        if whichCling:
            # <prefix>/bin/cling -> <prefix>
            clingInstDir = os.path.dirname(os.path.dirname(whichCling))
            llvmResourceDir = clingInstDir
        else:
            raise RuntimeError('Cannot find cling in $PATH. No cling, no fun.')

        # Load the platform-appropriate libclingJupyter shared library.
        for ext in ['so', 'dylib', 'dll']:
            libFilename = clingInstDir + "/lib/libclingJupyter." + ext
            if os.access(libFilename, os.R_OK):
                # RTLD_GLOBAL so cling's symbols are visible to code it JITs.
                self.libclingJupyter = ctypes.CDLL(libFilename,
                                                   mode=ctypes.RTLD_GLOBAL)
                break

        if not getattr(self, 'libclingJupyter', None):
            raise RuntimeError('Cannot find ' + clingInstDir +
                               '/lib/libclingJupyter.{so,dylib,dll}')

        self.libclingJupyter.cling_create.restype = my_void_p
        self.libclingJupyter.cling_eval.restype = my_void_p
        # build -std=c++11 or -std=c++14 option
        stdopt = ("-std=" + self.std).encode('utf-8')
        self.log.info("Using {}".format(stdopt.decode('utf-8')))
        strarr = ctypes.c_char_p * 5
        argv = strarr(b"clingJupyter", stdopt,
                      b"-I" + clingInstDir.encode('utf-8') + b"/include/",
                      b"", b"")
        llvmResourceDirCP = ctypes.c_char_p(llvmResourceDir.encode('utf8'))

        # cling writes display-data messages to pipe_in; we read them from
        # self.output_pipe in a background thread.
        self.output_pipe, pipe_in = os.pipe()
        self.interp = self.libclingJupyter.cling_create(
            5, argv, llvmResourceDirCP, pipe_in)

        self.libclingJupyter.cling_complete_start.restype = my_void_p
        self.libclingJupyter.cling_complete_next.restype = my_void_p  # c_char_p

        self.output_thread = threading.Thread(target=self.publish_pipe_output)
        self.output_thread.daemon = True
        self.output_thread.start()

    def _recv_dict(self, pipe):
        """Receive a serialized dict on a pipe

        Returns the dictionary.
        """
        # Wire format:
        # // Pipe sees (all numbers are longs, except for the first):
        # // - num bytes in a long (sent as a single unsigned char!)
        # // - num elements of the MIME dictionary; Jupyter selects one to display.
        # // For each MIME dictionary element:
        # //   - length of MIME type key
        # //   - MIME type key
        # //   - size of MIME data buffer (including the terminating 0 for
        # //     0-terminated strings)
        # //   - MIME data buffer
        data = {}
        b1 = os.read(pipe, 1)
        sizeof_long = struct.unpack('B', b1)[0]
        if sizeof_long == 8:
            fmt = 'Q'
        else:
            fmt = 'L'
        buf = os.read(pipe, sizeof_long)
        num_elements = struct.unpack(fmt, buf)[0]
        for i in range(num_elements):
            buf = os.read(pipe, sizeof_long)
            len_key = struct.unpack(fmt, buf)[0]
            key = os.read(pipe, len_key).decode('utf8')
            buf = os.read(pipe, sizeof_long)
            len_value = struct.unpack(fmt, buf)[0]
            value = os.read(pipe, len_value).decode('utf8')
            data[key] = value
        return data

    def publish_pipe_output(self):
        """Watch output_pipe for display-data messages
        and publish them on IOPub when they arrive
        """
        while True:
            select.select([self.output_pipe], [], [])
            data = self._recv_dict(self.output_pipe)
            self.session.send(self.iopub_socket, 'display_data',
                              content={
                                  'data': data,
                                  'metadata': {},
                              },
                              parent=self._parent_header,
                              )

    @contextmanager
    def forward_stream(self, name):
        """Capture stdout or stderr (selected by ``name``) and forward it as
        Jupyter stream messages while the context is active."""
        # create pipe for stdout
        if name == 'stdout':
            c_flush_p = c_stdout_p
        elif name == 'stderr':
            c_flush_p = c_stderr_p
        else:
            raise ValueError("Name must be stdout or stderr, not %r" % name)

        real_fd = getattr(sys, '__%s__' % name).fileno()
        save_fd = os.dup(real_fd)
        pipe_out, pipe_in = os.pipe()
        # Redirect the real fd into our pipe.
        os.dup2(pipe_in, real_fd)
        os.close(pipe_in)

        # make pipe_out non-blocking
        flags = fcntl(pipe_out, F_GETFL)
        fcntl(pipe_out, F_SETFL, flags | os.O_NONBLOCK)

        def forwarder(pipe):
            """Forward bytes on a pipe to stream messages"""
            while True:
                r, w, x = select.select([pipe], [], [], self.flush_interval)
                if not r:
                    # nothing to read, flush libc's stdout and check again
                    libc.fflush(c_flush_p)
                    continue
                data = os.read(pipe, 1024)
                if not data:
                    # pipe closed, we are done
                    break
                # send output
                self.session.send(self.iopub_socket, 'stream',
                                  {
                                      'name': name,
                                      'text': data.decode('utf8', 'replace'),
                                  },
                                  parent=self._parent_header)

        t = threading.Thread(target=forwarder, args=(pipe_out,))
        t.start()
        try:
            yield
        finally:
            # flush the pipe
            libc.fflush(c_flush_p)
            # Closing real_fd closes the write end, letting the forwarder's
            # os.read return b'' and the thread exit.
            os.close(real_fd)
            t.join()

            # and restore original stdout
            os.close(pipe_out)
            os.dup2(save_fd, real_fd)
            os.close(save_fd)

    def run_cell(self, code, silent=False):
        """Evaluate ``code`` in cling; returns a pointer to the result string
        (NULL on error)."""
        return self.libclingJupyter.cling_eval(
            self.interp, ctypes.c_char_p(code.encode('utf8')))

    def do_execute(self, code, silent, store_history=True,
                   user_expressions=None, allow_stdin=False):
        """Execute a cell, forwarding stdout/stderr and publishing the
        interpreter's textual result (if any) as execute_result."""
        if not code.strip():
            return {
                'status': 'ok',
                'execution_count': self.execution_count,
                'payload': [],
                'user_expressions': {},
            }

        status = 'ok'
        with self.forward_stream('stdout'), self.forward_stream('stderr'):
            stringResult = self.run_cell(code, silent)

        if not stringResult:
            # NULL pointer from cling_eval signals an interpreter error.
            status = 'error'
        else:
            self.session.send(
                self.iopub_socket,
                'execute_result',
                content={
                    'data': {
                        'text/plain':
                        ctypes.cast(stringResult,
                                    ctypes.c_char_p).value.decode(
                                        'utf8', 'replace'),
                    },
                    'metadata': {},
                    'execution_count': self.execution_count,
                },
                parent=self._parent_header
            )
            # The result string was allocated by cling; free it there too.
            self.libclingJupyter.cling_eval_free(stringResult)

        reply = {
            'status': status,
            'execution_count': self.execution_count,
        }

        if status == 'error':
            # cling does not report structured error info yet; placeholders.
            err = {
                'ename': 'ename',
                'evalue': 'evalue',
                'traceback': [],
            }
            self.send_response(self.iopub_socket, 'error', err)
            reply.update(err)
        elif status == 'ok':
            reply.update({
                # Deliberately disabled payload (key is intentionally
                # mangled so the frontend ignores it).
                'THIS DOES NOT WORK: payload': [{
                    'source': 'set_next_input',
                    'replace': True,
                    'text': '//THIS IS MAGIC\n' + code
                }],
                'user_expressions': {},
            })
        else:
            raise ValueError("Invalid status: %r" % status)

        return reply

    def do_complete(self, code, cursor_pos):
        """Provide completions here"""
        # if cursor_pos = cursor_start = cursor_end,
        # matches should be a list of strings to be appended after the cursor
        return {'matches': [],
                'cursor_end': cursor_pos,
                'cursor_start': cursor_pos,
                'metadata': {},
                'status': 'ok'}
class PluginBlock(Block):
    """Base class for pipeline blocks that consume a data field as input."""

    _view_name = Unicode('PluginBlockView').tag(sync=True)
    _model_name = Unicode('PluginBlockModel').tag(sync=True)

    # Parent block in the pipeline; None until this block is applied.
    _parent_block = Instance(BlockType, allow_none=True, default_value=None)
    # Names of the data fields available on the root mesh.
    _available_input_data = List([])
    # Component names of the currently selected data field, plus a literal
    # 0 used as a padding placeholder.
    _available_input_components = List([])
    # Number of components this plugin consumes; None means "takes no input".
    _input_data_dim = Int(allow_none=True, default_value=None)

    # TODO Validate data/components names and synchronise JavaScript -> Python
    input_data = Unicode(allow_none=True, default_value=None).tag(sync=True)
    input_components = List(Union((Unicode(), Int()))).tag(sync=True)

    def __init__(self, *args, **kwargs):
        super(PluginBlock, self).__init__(*args, **kwargs)
        # Interactive controls, created lazily in _init_input_data_widgets().
        self.input_data_wid = None
        self.input_components_wid = None

    def _ipython_display_(self, *args, **kwargs):
        display(self.interact())

    def interact(self):
        # Subclasses return their interactive widget layout here.
        pass

    def _get_data(self, parent):
        """Walk up the pipeline from ``parent`` to the DataBlock and return
        the mesh data list."""
        block = parent
        while not isinstance(block, DataBlock):
            block = block._parent_block
        return block.mesh.data

    @observe('_parent_block')
    def _update_input_data(self, change):
        # When the block is attached, discover the available data fields and
        # default to the first one.
        parent = change['new']
        if parent is None:
            return
        data = self._get_data(parent)
        self._available_input_data = [d.name for d in data]
        self.input_data = self._available_input_data[0]

    @observe('input_data')
    def _update_available_components(self, change):
        data = self._get_data(self._parent_block)
        for d in data:
            if d.name == change['new']:
                current_data = d
        # NOTE(review): if change['new'] matches no data name, current_data
        # is unbound and the next line raises NameError -- confirm that
        # input_data is always a valid name before relying on this.
        self._available_input_components = [
            c.name for c in current_data.components] + [0]

    @observe('_available_input_components')
    def _update_input_components(self, change):
        if self._input_data_dim is None:
            return

        available_components = change['new']

        # Keep any existing dropdown widgets' options in sync.
        if self.input_components_wid is not None:
            for component_wid in self.input_components_wid:
                component_wid.options = available_components

        # Check current components validity
        components_are_valid = True
        if not len(self.input_components):
            components_are_valid = False
        for c in self.input_components:
            if c not in available_components:
                components_are_valid = False
        if components_are_valid:
            return

        # Rebuild the selection: one component per consumed dimension,
        # padding with the 0 placeholder when there are not enough.
        new_components = []
        for dim in range(self._input_data_dim):
            if len(available_components) <= dim:
                new_components.append(0)
                continue
            new_components.append(available_components[dim])
        self.input_components = new_components

    def _link_dropdown(self, dropdown, dim):
        """Two-way link a component dropdown with input_components[dim]."""
        def handle_dropdown_change(change):
            # List traits need reassignment (not in-place mutation) for the
            # change to be noticed.
            copy = self.input_components.copy()
            copy[dim] = change['new']
            self.input_components = copy

        dropdown.observe(handle_dropdown_change, names=['value'])

        def handle_input_change(change):
            dropdown.value = self.input_components[dim]

        self.observe(handle_input_change, names=['input_components'])
        link((dropdown, 'options'), (self, '_available_input_components'))

    def _init_input_data_widgets(self):
        """Create the data-field dropdown and one component dropdown per
        consumed dimension."""
        self.input_components_wid = [Label('Input components')]
        for dim in range(self._input_data_dim):
            dropdown = Dropdown(
                options=self._available_input_components,
                value=self.input_components[dim]
            )
            dropdown.layout.width = 'fit-content'
            self._link_dropdown(dropdown, dim)
            self.input_components_wid.append(dropdown)

        self.input_data_wid = Dropdown(
            description='Input data',
            options=self._available_input_data,
            value=self.input_data
        )
        self.input_data_wid.layout.width = 'fit-content'
        link((self.input_data_wid, 'value'), (self, 'input_data'))

    def _interact(self):
        # Returns a tuple of widgets for subclasses to include in interact().
        if self._input_data_dim is not None:
            if self.input_data_wid is None:
                self._init_input_data_widgets()
            return (VBox((self.input_data_wid,
                          HBox(self.input_components_wid))), )
        return ()

    def _get_component_min_max(self, data_name, component_name):
        """Return the (min, max) range of a named component of a named data
        field, or raise RuntimeError if it does not exist."""
        data = self._get_data(self._parent_block)
        for d in data:
            if d.name == data_name:
                for c in d.components:
                    if c.name == component_name:
                        return (c.min, c.max)
        raise RuntimeError('Unknown component {}.{}'.format(
            data_name, component_name))
class Drawing(GMapsWidgetMixin, widgets.Widget):
    """
    Widget for a drawing layer

    Add this to a :class:`gmaps.Map` or :class:`gmaps.Figure`
    instance to let you draw on the map.

    You should not need to instantiate this directly. Instead,
    use the :func:`gmaps.drawing_layer` factory function.

    :Examples:

    {examples}

    {params}

    :param mode:
        Initial drawing mode. One of ``DISABLED``, ``MARKER``,
        ``LINE``, ``POLYGON`` or ``DELETE``. Defaults to ``MARKER``
        if ``toolbar_controls.show_controls`` is True, otherwise
        defaults to ``DISABLED``.
    :type mode: str, optional

    :param toolbar_controls:
        Widget representing the drawing toolbar.
    :type toolbar_controls: :class:`gmaps.DrawingControls`, optional
    """
    has_bounds = False
    _view_name = Unicode('DrawingLayerView').tag(sync=True)
    _model_name = Unicode('DrawingLayerModel').tag(sync=True)

    features = List().tag(sync=True, **widgets.widget_serialization)
    mode = Enum(ALLOWED_DRAWING_MODES).tag(sync=True)
    marker_options = Instance(MarkerOptions, allow_none=False)
    toolbar_controls = Instance(DrawingControls, allow_none=False).tag(
        sync=True, **widgets.widget_serialization)

    def __init__(self, **kwargs):
        kwargs['mode'] = self._get_initial_mode(kwargs)
        self._new_feature_callbacks = []
        super(Drawing, self).__init__(**kwargs)
        self.on_msg(self._handle_message)
        # Watch marker_options so that mutating the options object in place
        # still triggers an update (see _on_marker_options_change).
        self.marker_options.observe(self._on_marker_options_change)

    def on_new_feature(self, callback):
        """
        Register a callback called when new features are added

        :param callback:
            Callable to be called when a new feature is added. It receives a
            single argument, the feature that has been added: an instance of
            :class:`gmaps.Line`, :class:`gmaps.Marker` or
            :class:`gmaps.Polygon`.
        :type callback: callable
        """
        self._new_feature_callbacks.append(callback)

    def _get_initial_mode(self, constructor_kwargs):
        """Choose the starting drawing mode from the constructor arguments."""
        # An explicitly requested mode always wins.
        if 'mode' in constructor_kwargs:
            return constructor_kwargs['mode']
        # Otherwise start disabled when the toolbar is hidden, since the user
        # would have no way to switch modes interactively.
        if 'toolbar_controls' in constructor_kwargs:
            controls_hidden = \
                not constructor_kwargs['toolbar_controls'].show_controls
        else:
            controls_hidden = False
        return 'DISABLED' if controls_hidden else DEFAULT_DRAWING_MODE

    def _on_marker_options_change(self, change):
        # Re-assign a deep copy so traitlets registers the change and syncs
        # it to the front-end.
        self.marker_options = copy.deepcopy(self.marker_options)

    @default('marker_options')
    def _default_marker_options(self):
        return MarkerOptions()

    @default('toolbar_controls')
    def _default_toolbar_controls(self):
        return DrawingControls()

    @observe('features')
    def _on_new_feature(self, change):
        """Notify registered callbacks about features not present before."""
        if not self._new_feature_callbacks:
            return
        previous = set(change['old'])
        for feature in change['new']:
            if feature in previous:
                continue
            for callback in self._new_feature_callbacks:
                callback(feature)

    def _delete_feature(self, model_id):
        self.features = [
            feature for feature in self.features
            if feature.model_id != model_id
        ]

    def _handle_message(self, _, content, buffers):
        """Dispatch custom messages sent by the JS view."""
        event = content.get('event')
        if event == 'FEATURE_ADDED':
            payload = content['payload']
            feature_type = payload['featureType']
            if feature_type == 'MARKER':
                feature = self.marker_options.to_marker(
                    payload['latitude'], payload['longitude'])
            elif feature_type == 'LINE':
                feature = Line(start=payload['start'], end=payload['end'])
            elif feature_type == 'POLYGON':
                feature = Polygon(payload['path'])
            self.features = self.features + [feature]
        elif event == 'MODE_CHANGED':
            self.mode = content['payload']['mode']
        elif event == 'FEATURE_DELETED':
            self._delete_feature(content['payload']['modelId'])
class Block(Widget, BlockType): _view_name = Unicode('BlockView').tag(sync=True) _model_name = Unicode('BlockModel').tag(sync=True) _view_module = Unicode('odysis').tag(sync=True) _model_module = Unicode('odysis').tag(sync=True) _view_module_version = Unicode(odysis_version).tag(sync=True) _model_module_version = Unicode(odysis_version).tag(sync=True) _blocks = List(Instance(BlockType)).tag(sync=True, **widget_serialization) visible = Bool(True).tag(sync=True) def apply(self, block): block._validate_parent(self) if block._parent_block is not None: raise RuntimeError('Cannot apply the same effect at different places') block._parent_block = self self._blocks = list([b for b in self._blocks] + [block]) def remove(self, block): block._parent_block = None self._blocks = list([b for b in self._blocks if b.model_id != block.model_id]) def color_mapping(self, *args, **kwargs): effect = ColorMapping(*args, **kwargs) self.apply(effect) return effect def grid(self, *args, **kwargs): effect = Grid(*args, **kwargs) self.apply(effect) return effect def warp(self, *args, **kwargs): effect = Warp(*args, **kwargs) self.apply(effect) return effect def vector_field(self, *args, **kwargs): effect = VectorField(*args, **kwargs) self.apply(effect) return effect def point_cloud(self, *args, **kwargs): effect = PointCloud(*args, **kwargs) self.apply(effect) return effect def clip(self, *args, **kwargs): effect = Clip(*args, **kwargs) self.apply(effect) return effect def slice(self, *args, **kwargs): effect = Slice(*args, **kwargs) self.apply(effect) return effect def threshold(self, *args, **kwargs): effect = Threshold(*args, **kwargs) self.apply(effect) return effect def iso_surface(self, *args, **kwargs): effect = IsoSurface(*args, **kwargs) self.apply(effect) return effect def __init__(self, *args, **kwargs): super(Block, self).__init__(*args, **kwargs) self.colormap_wid = None self.colormapslider_wid = None def _validate_parent(self, parent): pass
class HistoryAccessor(HistoryAccessorBase):
    """Access the history database without adding to it.

    This is intended for use by standalone history tools. IPython
    shells use HistoryManager, below, which is a subclass of this."""

    # counter for init_db retries, so we don't keep trying over and over
    _corrupt_db_counter = 0
    # after two failures, fallback on :memory:
    _corrupt_db_limit = 2

    # String holding the path to the history file
    hist_file = Unicode(
        help="""Path to file to use for SQLite history database.

        By default, IPython will put the history database in the IPython
        profile directory.  If you would rather share one history among
        profiles, you can set this value in each, so that they are consistent.

        Due to an issue with fcntl, SQLite is known to misbehave on some NFS
        mounts.  If you see IPython hanging, try setting this to something on a
        local disk, e.g::

            ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite

        you can also use the specific value `:memory:` (including the colon
        at both end but not the back ticks), to avoid creating an history file.

        """).tag(config=True)

    enabled = Bool(True,
        help="""enable the SQLite history

        set enabled=False to disable the SQLite history,
        in which case there will be no stored history, no SQLite connection,
        and no background saving thread.  This may be necessary in some
        threaded environments where IPython is embedded.
        """
    ).tag(config=True)

    connection_options = Dict(
        help="""Options for configuring the SQLite connection

        These options are passed as keyword args to sqlite3.connect
        when establishing database connections.
        """
    ).tag(config=True)

    # The SQLite database
    db = Any()
    @observe('db')
    def _db_changed(self, change):
        """validate the db, since it can be an Instance of two different types"""
        new = change['new']
        connection_types = (DummyDB,)
        if sqlite3 is not None:
            connection_types = (DummyDB, sqlite3.Connection)
        if not isinstance(new, connection_types):
            msg = "%s.db must be sqlite3 Connection or DummyDB, not %r" % \
                    (self.__class__.__name__, new)
            raise TraitError(msg)

    def __init__(self, profile='default', hist_file=u'', **traits):
        """Create a new history accessor.

        Parameters
        ----------
        profile : str
          The name of the profile from which to open history.
        hist_file : str
          Path to an SQLite history database stored by IPython. If specified,
          hist_file overrides profile.
        config : :class:`~traitlets.config.loader.Config`
          Config object. hist_file can also be set through this.
        """
        # We need a pointer back to the shell for various tasks.
        super(HistoryAccessor, self).__init__(**traits)
        # defer setting hist_file from kwarg until after init,
        # otherwise the default kwarg value would clobber any value
        # set by config
        if hist_file:
            self.hist_file = hist_file

        if self.hist_file == u'':
            # No one has set the hist_file, yet.
            self.hist_file = self._get_hist_file_name(profile)

        # Without sqlite3, history cannot be persisted at all.
        if sqlite3 is None and self.enabled:
            warn("IPython History requires SQLite, your history will not be saved")
            self.enabled = False

        self.init_db()

    def _get_hist_file_name(self, profile='default'):
        """Find the history file for the given profile name.

        This is overridden by the HistoryManager subclass, to use the
        shell's active profile.

        Parameters
        ----------
        profile : str
          The name of a profile which has a history file.
        """
        return os.path.join(locate_profile(profile), 'history.sqlite')

    @catch_corrupt_db
    def init_db(self):
        """Connect to the database, and create tables if necessary."""
        if not self.enabled:
            # History disabled: install a no-op stand-in connection.
            self.db = DummyDB()
            return

        # use detect_types so that timestamps return datetime objects
        kwargs = dict(detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
        kwargs.update(self.connection_options)
        self.db = sqlite3.connect(self.hist_file, **kwargs)
        self.db.execute("""CREATE TABLE IF NOT EXISTS sessions (session integer
                        primary key autoincrement, start timestamp,
                        end timestamp, num_cmds integer, remark text)""")
        self.db.execute("""CREATE TABLE IF NOT EXISTS history
                (session integer, line integer, source text, source_raw text,
                PRIMARY KEY (session, line))""")
        # Output history is optional, but ensure the table's there so it can be
        # enabled later.
        self.db.execute("""CREATE TABLE IF NOT EXISTS output_history
                        (session integer, line integer, output text,
                        PRIMARY KEY (session, line))""")
        self.db.commit()
        # success! reset corrupt db count
        self._corrupt_db_counter = 0

    def writeout_cache(self):
        """Overridden by HistoryManager to dump the cache before certain
        database lookups."""
        pass

    ## -------------------------------
    ## Methods for retrieving history:
    ## -------------------------------

    def _run_sql(self, sql, params, raw=True, output=False):
        """Prepares and runs an SQL query for the history database.

        Parameters
        ----------
        sql : str
          Any filtering expressions to go after SELECT ... FROM ...
        params : tuple
          Parameters passed to the SQL query (to replace "?")
        raw, output : bool
          See :meth:`get_range`

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        toget = 'source_raw' if raw else 'source'
        sqlfrom = "history"
        if output:
            # LEFT JOIN so input rows without stored output still appear.
            sqlfrom = "history LEFT JOIN output_history USING (session, line)"
            toget = "history.%s, output_history.output" % toget
        cur = self.db.execute("SELECT session, line, %s FROM %s " %\
                                (toget, sqlfrom) + sql, params)
        if output:    # Regroup into 3-tuples, and parse JSON
            return ((ses, lin, (inp, out)) for ses, lin, inp, out in cur)
        return cur

    @needs_sqlite
    @catch_corrupt_db
    def get_session_info(self, session):
        """Get info about a session.

        Parameters
        ----------
        session : int
            Session number to retrieve.

        Returns
        -------
        session_id : int
            Session ID number
        start : datetime
            Timestamp for the start of the session.
        end : datetime
            Timestamp for the end of the session, or None if IPython crashed.
        num_cmds : int
            Number of commands run, or None if IPython crashed.
        remark : unicode
            A manually set description.
        """
        query = "SELECT * from sessions where session == ?"
        return self.db.execute(query, (session,)).fetchone()

    @catch_corrupt_db
    def get_last_session_id(self):
        """Get the last session ID currently in the database.

        Within IPython, this should be the same as the value stored in
        :attr:`HistoryManager.session_number`.
        """
        # get_tail returns at most one record here; return its session id.
        for record in self.get_tail(n=1, include_latest=True):
            return record[0]

    @catch_corrupt_db
    def get_tail(self, n=10, raw=True, output=False, include_latest=False):
        """Get the last n lines from the history database.

        Parameters
        ----------
        n : int
          The number of lines to get
        raw, output : bool
          See :meth:`get_range`
        include_latest : bool
          If False (default), n+1 lines are fetched, and the latest one
          is discarded. This is intended to be used where the function
          is called by a user command, which it should not return.

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        self.writeout_cache()
        if not include_latest:
            # Fetch one extra row so the newest entry can be dropped below.
            n += 1
        cur = self._run_sql("ORDER BY session DESC, line DESC LIMIT ?",
                                (n,), raw=raw, output=output)
        if not include_latest:
            return reversed(list(cur)[1:])
        return reversed(list(cur))

    @catch_corrupt_db
    def search(self, pattern="*", raw=True, search_raw=True,
               output=False, n=None, unique=False):
        """Search the database using unix glob-style matching (wildcards
        * and ?).

        Parameters
        ----------
        pattern : str
          The wildcarded pattern to match when searching
        search_raw : bool
          If True, search the raw input, otherwise, the parsed input
        raw, output : bool
          See :meth:`get_range`
        n : None or int
          If an integer is given, it defines the limit of
          returned entries.
        unique : bool
          When it is true, return only unique entries.

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        tosearch = "source_raw" if search_raw else "source"
        if output:
            tosearch = "history." + tosearch
        self.writeout_cache()
        sqlform = "WHERE %s GLOB ?" % tosearch
        params = (pattern,)
        if unique:
            sqlform += ' GROUP BY {0}'.format(tosearch)
        if n is not None:
            # Newest first while limiting, then reversed below so the
            # caller still sees chronological order.
            sqlform += " ORDER BY session DESC, line DESC LIMIT ?"
            params += (n,)
        elif unique:
            sqlform += " ORDER BY session, line"
        cur = self._run_sql(sqlform, params, raw=raw, output=output)
        if n is not None:
            return reversed(list(cur))
        return cur

    @catch_corrupt_db
    def get_range(self, session, start=1, stop=None, raw=True,output=False):
        """Retrieve input by session.

        Parameters
        ----------
        session : int
            Session number to retrieve.
        start : int
            First line to retrieve.
        stop : int
            End of line range (excluded from output itself). If None, retrieve
            to the end of the session.
        raw : bool
            If True, return untranslated input
        output : bool
            If True, attempt to include output. This will be 'real' Python
            objects for the current session, or text reprs from previous
            sessions if db_log_output was enabled at the time. Where no output
            is found, None is used.

        Returns
        -------
        entries
            An iterator over the desired lines. Each line is a 3-tuple, either
            (session, line, input) if output is False, or
            (session, line, (input, output)) if output is True.
        """
        if stop:
            lineclause = "line >= ? AND line < ?"
            params = (session, start, stop)
        else:
            lineclause = "line>=?"
            params = (session, start)

        return self._run_sql("WHERE session==? AND %s" % lineclause,
                                    params, raw=raw, output=output)

    def get_range_by_str(self, rangestr, raw=True, output=False):
        """Get lines of history from a string of ranges, as used by magic
        commands %hist, %save, %macro, etc.

        Parameters
        ----------
        rangestr : str
          A string specifying ranges, e.g. "5 ~2/1-4". See
          :func:`magic_history` for full details.
        raw, output : bool
          As :meth:`get_range`

        Returns
        -------
        Tuples as :meth:`get_range`
        """
        for sess, s, e in extract_hist_ranges(rangestr):
            for line in self.get_range(sess, s, e, raw=raw, output=output):
                yield line
class Volume(widgets.Widget):
    """Widget class representing a volume (rendering) using three.js.

    The full-resolution cube is kept in ``data_original``; ``data`` holds
    the (possibly cropped and reduced) cube that is actually synced to the
    frontend.  When ``data_max_shape`` is set, the volume is re-cut to the
    current figure limits and reduced so no axis exceeds that size whenever
    the data or the limits change.
    """
    _view_name = Unicode('VolumeView').tag(sync=True)
    _view_module = Unicode('ipyvolume').tag(sync=True)
    _model_name = Unicode('VolumeModel').tag(sync=True)
    _model_module = Unicode('ipyvolume').tag(sync=True)
    _view_module_version = Unicode(semver_range_frontend).tag(sync=True)
    _model_module_version = Unicode(semver_range_frontend).tag(sync=True)

    # The (possibly reduced) volume data synced to the frontend.
    data = Array(default_value=None, allow_none=True).tag(sync=True, **array_cube_tile_serialization)
    # Full-resolution data as supplied by the user (never synced directly).
    data_original = Array(default_value=None, allow_none=True)
    # Maximum size per axis for the synced data; None means "no limit".
    data_max_shape = traitlets.CInt(None, allow_none=True)  # TODO: allow this to be a list
    data_min = traitlets.CFloat(0).tag(sync=True)
    data_max = traitlets.CFloat(1).tag(sync=True)
    show_min = traitlets.CFloat(0).tag(sync=True)
    show_max = traitlets.CFloat(1).tag(sync=True)
    clamp_min = traitlets.CBool(False).tag(sync=True)
    clamp_max = traitlets.CBool(False).tag(sync=True)
    opacity_scale = traitlets.CFloat(1.0).tag(sync=True)
    brightness = traitlets.CFloat(1.0).tag(sync=True)
    tf = traitlets.Instance(TransferFunction, allow_none=True).tag(sync=True, **ipywidgets.widget_serialization)
    ray_steps = traitlets.CInt(None, allow_none=True,
        help='defines the length of the ray (1/ray_steps) for each step, in normalized coordintes.').tag(sync=True)

    rendering_method = traitlets.Enum(values=['NORMAL', 'MAX_INTENSITY'], default_value='NORMAL').tag(sync=True)
    lighting = traitlets.Bool(True).tag(sync=True)

    # Extent of the synced data / of the original data, respectively.
    extent = traitlets.Any().tag(sync=True)
    extent_original = traitlets.Any()

    def __init__(self, **kwargs):
        super(Volume, self).__init__(**kwargs)
        self._update_data()
        self.observe(self.update_data, ['data_original', 'data_max_shape'])

    def _listen_to(self, fig):
        # Re-cut the volume whenever the figure's view limits change.
        fig.observe(self.update_data, ['xlim', 'ylim', 'zlim'])

    @debounced(method=True)
    def update_data(self, change=None):
        self._update_data()

    def _update_data(self):
        """Recompute ``data``/``extent`` from ``data_original``.

        If no size limit is set, or the original data already fits within
        ``data_max_shape`` on every axis, the original data and extent are
        used as-is.  Otherwise the data is cropped to the current figure
        limits and reduced so no axis exceeds ``data_max_shape``.
        """
        if self.data_original is None:
            return
        # BUGFIX: data_max_shape defaults to None, and `k <= None` raised
        # TypeError here.  Treat None as "no limit": pass the data through.
        if self.data_max_shape is None or \
                all([k <= self.data_max_shape for k in self.data_original.shape]):
            self.data = self.data_original
            self.extent = self.extent_original
            return
        current_figure = ipv.gcf()
        xlim = current_figure.xlim
        ylim = current_figure.ylim
        zlim = current_figure.zlim
        shape = self.data_original.shape
        ex = self.extent_original
        # Data is indexed [z, y, x]; map each figure limit to index ranges
        # plus the trimmed extents.
        viewx, xt = grid_slice(ex[0][0], ex[0][1], shape[2], *xlim)
        viewy, yt = grid_slice(ex[1][0], ex[1][1], shape[1], *ylim)
        viewz, zt = grid_slice(ex[2][0], ex[2][1], shape[0], *zlim)
        # BUGFIX: index with a tuple of slices — indexing an ndarray with a
        # *list* of slices is rejected by modern numpy.
        view = (slice(*viewz), slice(*viewy), slice(*viewx))
        data_view = self.data_original[view]
        extent = [xt, yt, zt]
        data_view, extent = reduce_size(data_view, self.data_max_shape, extent)
        self.data = np.array(data_view)
        self.extent = extent
class HistoryManager(HistoryAccessor):
    """A class to organize all history-related functionality in one place.
    """
    # Public interface

    # An instance of the IPython shell we are attached to
    shell = Instance('IPython.core.interactiveshell.InteractiveShellABC',
                     allow_none=True)
    # Lists to hold processed and raw history. These start with a blank entry
    # so that we can index them starting from 1
    input_hist_parsed = List([""])
    input_hist_raw = List([""])
    # A list of directories visited during session
    dir_hist = List()
    @default('dir_hist')
    def _dir_hist_default(self):
        # getcwd can fail if the current directory has been removed.
        try:
            return [os.getcwd()]
        except OSError:
            return []

    # A dict of output history, keyed with ints from the shell's
    # execution count.
    output_hist = Dict()
    # The text/plain repr of outputs.
    output_hist_reprs = Dict()

    # The number of the current session in the history database
    session_number = Integer()

    db_log_output = Bool(False,
        help="Should the history database include output? (default: no)"
    ).tag(config=True)
    db_cache_size = Integer(0,
        help="Write to database every x commands (higher values save disk access & power).\n"
        "Values of 1 or less effectively disable caching."
    ).tag(config=True)
    # The input and output caches
    db_input_cache = List()
    db_output_cache = List()

    # History saving in separate thread
    save_thread = Instance('IPython.core.history.HistorySavingThread',
                           allow_none=True)
    # Event used to signal the save thread that the cache should be flushed.
    save_flag = Instance(threading.Event, allow_none=True)

    # Private interface
    # Variables used to store the three last inputs from the user.  On each new
    # history update, we populate the user's namespace with these, shifted as
    # necessary.
    _i00 = Unicode(u'')
    _i = Unicode(u'')
    _ii = Unicode(u'')
    _iii = Unicode(u'')

    # A regex matching all forms of the exit command, so that we don't store
    # them in the history (it's annoying to rewind the first entry and land on
    # an exit call).
    _exit_re = re.compile(r"(exit|quit)(\s*\(.*\))?$")

    def __init__(self, shell=None, config=None, **traits):
        """Create a new history manager associated with a shell instance.
        """
        # We need a pointer back to the shell for various tasks.
        super(HistoryManager, self).__init__(shell=shell, config=config,
            **traits)
        self.save_flag = threading.Event()
        # Separate locks so input and output caches can be flushed
        # independently of each other.
        self.db_input_cache_lock = threading.Lock()
        self.db_output_cache_lock = threading.Lock()

        try:
            self.new_session()
        except OperationalError:
            # Fall back to an in-memory database rather than failing startup.
            self.log.error("Failed to create history session in %s. History will not be saved.",
                self.hist_file, exc_info=True)
            self.hist_file = ':memory:'

        if self.enabled and self.hist_file != ':memory:':
            self.save_thread = HistorySavingThread(self)
            self.save_thread.start()

    def _get_hist_file_name(self, profile=None):
        """Get default history file name based on the Shell's profile.

        The profile parameter is ignored, but must exist for compatibility with
        the parent class."""
        profile_dir = self.shell.profile_dir.location
        return os.path.join(profile_dir, 'history.sqlite')

    @needs_sqlite
    def new_session(self, conn=None):
        """Get a new session number."""
        if conn is None:
            conn = self.db

        with conn:
            # NULL lets sqlite autoincrement the session id; end/num_cmds are
            # filled in by end_session.
            cur = conn.execute("""INSERT INTO sessions VALUES (NULL, ?, NULL,
                            NULL, "") """, (datetime.datetime.now(),))
            self.session_number = cur.lastrowid

    def end_session(self):
        """Close the database session, filling in the end time and line count."""
        self.writeout_cache()
        with self.db:
            self.db.execute("""UPDATE sessions SET end=?, num_cmds=? WHERE
                            session==?""", (datetime.datetime.now(),
                            len(self.input_hist_parsed)-1, self.session_number))
        self.session_number = 0

    def name_session(self, name):
        """Give the current session a name in the history database."""
        with self.db:
            self.db.execute("UPDATE sessions SET remark=? WHERE session==?",
                            (name, self.session_number))

    def reset(self, new_session=True):
        """Clear the session history, releasing all object references, and
        optionally open a new session."""
        self.output_hist.clear()
        # The directory history can't be completely empty
        self.dir_hist[:] = [os.getcwd()]

        if new_session:
            if self.session_number:
                self.end_session()
            self.input_hist_parsed[:] = [""]
            self.input_hist_raw[:] = [""]
            self.new_session()

    # ------------------------------
    # Methods for retrieving history
    # ------------------------------
    def get_session_info(self, session=0):
        """Get info about a session.

        Parameters
        ----------
        session : int
            Session number to retrieve. The current session is 0, and negative
            numbers count back from current session, so -1 is the previous
            session.

        Returns
        -------
        session_id : int
            Session ID number
        start : datetime
            Timestamp for the start of the session.
        end : datetime
            Timestamp for the end of the session, or None if IPython crashed.
        num_cmds : int
            Number of commands run, or None if IPython crashed.
        remark : unicode
            A manually set description.
        """
        if session <= 0:
            session += self.session_number
        return super(HistoryManager, self).get_session_info(session=session)

    def _get_range_session(self, start=1, stop=None, raw=True, output=False):
        """Get input and output history from the current session. Called by
        get_range, and takes similar parameters."""
        input_hist = self.input_hist_raw if raw else self.input_hist_parsed

        n = len(input_hist)
        # Normalize negative/None bounds against the in-memory history length.
        if start < 0:
            start += n
        if not stop or (stop > n):
            stop = n
        elif stop < 0:
            stop += n

        for i in range(start, stop):
            if output:
                line = (input_hist[i], self.output_hist_reprs.get(i))
            else:
                line = input_hist[i]
            # Session 0 denotes the current (in-memory) session.
            yield (0, i, line)

    def get_range(self, session=0, start=1, stop=None, raw=True,output=False):
        """Retrieve input by session.

        Parameters
        ----------
        session : int
            Session number to retrieve. The current session is 0, and negative
            numbers count back from current session, so -1 is previous session.
        start : int
            First line to retrieve.
        stop : int
            End of line range (excluded from output itself). If None, retrieve
            to the end of the session.
        raw : bool
            If True, return untranslated input
        output : bool
            If True, attempt to include output. This will be 'real' Python
            objects for the current session, or text reprs from previous
            sessions if db_log_output was enabled at the time. Where no output
            is found, None is used.

        Returns
        -------
        entries
            An iterator over the desired lines. Each line is a 3-tuple, either
            (session, line, input) if output is False, or
            (session, line, (input, output)) if output is True.
        """
        if session <= 0:
            session += self.session_number
        if session==self.session_number:          # Current session
            return self._get_range_session(start, stop, raw, output)
        return super(HistoryManager, self).get_range(session, start, stop, raw,
                                                     output)

    ## ----------------------------
    ## Methods for storing history:
    ## ----------------------------
    def store_inputs(self, line_num, source, source_raw=None):
        """Store source and raw input in history and create input cache
        variables ``_i*``.

        Parameters
        ----------
        line_num : int
          The prompt number of this input.
        source : str
          Python input.
        source_raw : str, optional
          If given, this is the raw input without any IPython transformations
          applied to it.  If not given, ``source`` is used.
        """
        if source_raw is None:
            source_raw = source
        source = source.rstrip('\n')
        source_raw = source_raw.rstrip('\n')

        # do not store exit/quit commands
        if self._exit_re.match(source_raw.strip()):
            return

        self.input_hist_parsed.append(source)
        self.input_hist_raw.append(source_raw)

        with self.db_input_cache_lock:
            self.db_input_cache.append((line_num, source, source_raw))
            # Trigger to flush cache and write to DB.
            if len(self.db_input_cache) >= self.db_cache_size:
                self.save_flag.set()

        # update the auto _i variables
        self._iii = self._ii
        self._ii = self._i
        self._i = self._i00
        self._i00 = source_raw

        # hackish access to user namespace to create _i1,_i2... dynamically
        new_i = '_i%s' % line_num
        to_main = {'_i': self._i,
                   '_ii': self._ii,
                   '_iii': self._iii,
                   new_i : self._i00 }

        if self.shell is not None:
            self.shell.push(to_main, interactive=False)

    def store_output(self, line_num):
        """If database output logging is enabled, this saves all the
        outputs from the indicated prompt number to the database.  It's
        called by run_cell after code has been executed.

        Parameters
        ----------
        line_num : int
          The line number from which to save outputs
        """
        if (not self.db_log_output) or (line_num not in self.output_hist_reprs):
            return
        output = self.output_hist_reprs[line_num]

        with self.db_output_cache_lock:
            self.db_output_cache.append((line_num, output))
        if self.db_cache_size <= 1:
            self.save_flag.set()

    def _writeout_input_cache(self, conn):
        # Flush the buffered input lines to the history table.
        with conn:
            for line in self.db_input_cache:
                conn.execute("INSERT INTO history VALUES (?, ?, ?, ?)",
                                (self.session_number,)+line)

    def _writeout_output_cache(self, conn):
        # Flush the buffered outputs to the output_history table.
        with conn:
            for line in self.db_output_cache:
                conn.execute("INSERT INTO output_history VALUES (?, ?, ?)",
                                (self.session_number,)+line)

    @needs_sqlite
    def writeout_cache(self, conn=None):
        """Write any entries in the cache to the database."""
        if conn is None:
            conn = self.db

        with self.db_input_cache_lock:
            try:
                self._writeout_input_cache(conn)
            except sqlite3.IntegrityError:
                # A (session, line) collision: move to a fresh session and
                # retry once.
                self.new_session(conn)
                print("ERROR! Session/line number was not unique in",
                      "database. History logging moved to new session",
                                                self.session_number)
                try:
                    # Try writing to the new session. If this fails, don't
                    # recurse
                    self._writeout_input_cache(conn)
                except sqlite3.IntegrityError:
                    pass
            finally:
                self.db_input_cache = []

        with self.db_output_cache_lock:
            try:
                self._writeout_output_cache(conn)
            except sqlite3.IntegrityError:
                print("!! Session/line number for output was not unique",
                      "in database. Output will not be stored.")
            finally:
                self.db_output_cache = []
class UILayer(Layer): _view_name = Unicode('LeafletUILayerView').tag(sync=True) _model_name = Unicode('LeafletUILayerModel').tag(sync=True)
class InteractiveShellEmbed(TerminalInteractiveShell):
    """A terminal shell subclass for embedding an IPython prompt inside a
    running Python program (see :meth:`mainloop`)."""

    dummy_mode = Bool(False)
    exit_msg = Unicode('')
    embedded = CBool(True)
    should_raise = CBool(False)
    # Like the base class display_banner is not configurable, but here it
    # is True by default.
    display_banner = CBool(True)
    exit_msg = Unicode()

    # When embedding, by default we don't change the terminal title
    term_title = Bool(False,
        help="Automatically set the terminal title").tag(config=True)

    # Class-level registry of call/creation sites ("file:lineno") at which
    # embedding has been deactivated.
    _inactive_locations = set()

    @property
    def embedded_active(self):
        # Active only if neither the call site nor the creation site has
        # been disabled.
        return (self._call_location_id not in InteractiveShellEmbed._inactive_locations)\
            and (self._init_location_id not in InteractiveShellEmbed._inactive_locations)

    def _disable_init_location(self):
        """Disable the current Instance creation location"""
        InteractiveShellEmbed._inactive_locations.add(self._init_location_id)

    @embedded_active.setter
    def embedded_active(self, value):
        if value:
            # Re-enable both this call site and this creation site.
            InteractiveShellEmbed._inactive_locations.discard(
                self._call_location_id)
            InteractiveShellEmbed._inactive_locations.discard(
                self._init_location_id)
        else:
            InteractiveShellEmbed._inactive_locations.add(
                self._call_location_id)

    def __init__(self, **kw):
        if kw.get('user_global_ns', None) is not None:
            raise DeprecationWarning(
                "Key word argument `user_global_ns` has been replaced by `user_module` since IPython 4.0.")

        # Identify the creation site ("file:lineno") so it can be
        # activated/deactivated later; default to the caller's frame.
        clid = kw.pop('_init_location_id', None)
        if not clid:
            frame = sys._getframe(1)
            clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
        self._init_location_id = clid

        super(InteractiveShellEmbed, self).__init__(**kw)

        # don't use the ipython crash handler so that user exceptions aren't
        # trapped
        sys.excepthook = ultratb.FormattedTB(color_scheme=self.colors,
                                             mode=self.xmode,
                                             call_pdb=self.pdb)

    def init_sys_modules(self):
        """
        Explicitly overwrite :mod:`IPython.core.interactiveshell` to do nothing.
        """
        pass

    def init_magics(self):
        super(InteractiveShellEmbed, self).init_magics()
        self.register_magics(EmbeddedMagics)

    def __call__(self, header='', local_ns=None, module=None, dummy=None,
                 stack_depth=1, global_ns=None, compile_flags=None, **kw):
        """Activate the interactive interpreter.

        __call__(self,header='',local_ns=None,module=None,dummy=None) -> Start
        the interpreter shell with the given local and global namespaces, and
        optionally print a header string at startup.

        The shell can be globally activated/deactivated using the
        dummy_mode attribute. This allows you to turn off a shell used
        for debugging globally.

        However, *each* time you call the shell you can override the current
        state of dummy_mode with the optional keyword parameter 'dummy'. For
        example, if you set dummy mode on with IPShell.dummy_mode = True, you
        can still have a specific call work by making it as IPShell(dummy=False).
        """
        # we are called, set the underlying interactiveshell not to exit.
        self.keep_running = True

        # If the user has turned it off, go away
        clid = kw.pop('_call_location_id', None)
        if not clid:
            frame = sys._getframe(1)
            clid = '%s:%s' % (frame.f_code.co_filename, frame.f_lineno)
        self._call_location_id = clid

        if not self.embedded_active:
            return

        # Normal exits from interactive mode set this flag, so the shell can't
        # re-enter (it checks this variable at the start of interactive mode).
        self.exit_now = False

        # Allow the dummy parameter to override the global __dummy_mode
        if dummy or (dummy != 0 and self.dummy_mode):
            return

        # self.banner is auto computed
        if header:
            self.old_banner2 = self.banner2
            self.banner2 = self.banner2 + '\n' + header + '\n'
        else:
            self.old_banner2 = ''

        if self.display_banner:
            self.show_banner()

        # Call the embedding code with a stack depth of 1 so it can skip over
        # our call and get the original caller's namespaces.
        self.mainloop(local_ns, module, stack_depth=stack_depth,
                      global_ns=global_ns, compile_flags=compile_flags)

        # Restore the banner that was modified above (if it was).
        self.banner2 = self.old_banner2

        if self.exit_msg is not None:
            print(self.exit_msg)

        if self.should_raise:
            raise KillEmbeded('Embedded IPython raising error, as user requested.')

    def mainloop(self, local_ns=None, module=None, stack_depth=0,
                 display_banner=None, global_ns=None, compile_flags=None):
        """Embeds IPython into a running python program.

        Parameters
        ----------
        local_ns, module
          Working local namespace (a dict) and module (a module or similar
          object). If given as None, they are automatically taken from the
          scope where the shell was called, so that program variables become
          visible.
        stack_depth : int
          How many levels in the stack to go to looking for namespaces (when
          local_ns or module is None). This allows an intermediate caller to
          make sure that this function gets the namespace from the intended
          level in the stack. By default (0) it will get its locals and
          globals from the immediate caller.
        compile_flags
          A bit field identifying the __future__ features
          that are enabled, as passed to the builtin :func:`compile` function.
          If given as None, they are automatically taken from the scope where
          the shell was called.
        """

        if (global_ns is not None) and (module is None):
            raise DeprecationWarning(
                "'global_ns' keyword argument is deprecated, and has been removed in IPython 5.0 use `module` keyword argument instead.")

        if (display_banner is not None):
            warnings.warn(
                "The display_banner parameter is deprecated since IPython 4.0",
                DeprecationWarning)

        # Get locals and globals from caller
        if ((local_ns is None or module is None or compile_flags is None)
            and self.default_user_namespaces):
            call_frame = sys._getframe(stack_depth).f_back

            if local_ns is None:
                local_ns = call_frame.f_locals
            if module is None:
                global_ns = call_frame.f_globals
                try:
                    module = sys.modules[global_ns['__name__']]
                except KeyError:
                    # Fall back to a stand-in module wrapping the caller's
                    # globals when __name__ doesn't resolve in sys.modules.
                    warnings.warn("Failed to get module %s" % \
                        global_ns.get('__name__', 'unknown module')
                    )
                    module = DummyMod()
                    module.__dict__ = global_ns
            if compile_flags is None:
                compile_flags = (call_frame.f_code.co_flags &
                                 compilerop.PyCF_MASK)

        # Save original namespace and module so we can restore them after
        # embedding; otherwise the shell doesn't shut down correctly.
        orig_user_module = self.user_module
        orig_user_ns = self.user_ns
        orig_compile_flags = self.compile.flags

        # Update namespaces and fire up interpreter

        # The global one is easy, we can just throw it in
        if module is not None:
            self.user_module = module

        # But the user/local one is tricky: ipython needs it to store internal
        # data, but we also need the locals. We'll throw our hidden variables
        # like _ih and get_ipython() into the local namespace, but delete them
        # later.
        if local_ns is not None:
            reentrant_local_ns = {k: v for (k, v) in local_ns.items() if k not in self.user_ns_hidden.keys()}
            self.user_ns = reentrant_local_ns
            self.init_user_ns()

        # Compiler flags
        if compile_flags is not None:
            self.compile.flags = compile_flags

        # make sure the tab-completer has the correct frame information, so it
        # actually completes using the frame's locals/globals
        self.set_completer_frame()

        with self.builtin_trap, self.display_trap:
            self.interact()

        # now, purge out the local namespace of IPython's hidden variables.
        if local_ns is not None:
            local_ns.update({k: v for (k, v) in self.user_ns.items() if k not in self.user_ns_hidden.keys()})

        # Restore original namespace so shell can shut down when we exit.
        self.user_module = orig_user_module
        self.user_ns = orig_user_ns
        self.compile.flags = orig_compile_flags
class LocalTileLayer(TileLayer): _view_name = Unicode('LeafletLocalTileLayerView').tag(sync=True) _model_name = Unicode('LeafletLocalTileLayerModel').tag(sync=True) path = Unicode('').tag(sync=True)
class Session(Configurable):
    """Object for handling serialization and sending of messages.

    The Session object handles building messages and sending them
    with ZMQ sockets or ZMQStream objects.  Objects can communicate
    with each other over the network via Session objects, and only
    need to work with the dict-based IPython message spec. The Session
    will handle serialization/deserialization, security, and
    metadata.

    Sessions support configurable serialization via packer/unpacker traits,
    and signing with HMAC digests via the key/keyfile traits.

    Parameters
    ----------

    debug : bool
        whether to trigger extra debugging statements
    packer/unpacker : str : 'json', 'pickle' or import_string
        importstrings for methods to serialize message parts.  If just
        'json' or 'pickle', predefined JSON and pickle packers will be used.
        Otherwise, the entire importstring must be used.

        The functions must accept at least valid JSON input, and output
        *bytes*.

        For example, to use msgpack:
        packer = 'msgpack.packb', unpacker='msgpack.unpackb'
    pack/unpack : callables
        You can also set the pack/unpack callables for serialization
        directly.
    session : bytes
        the ID of this Session object.  The default is to generate a new
        UUID.
    username : unicode
        username added to message headers.  The default is to ask the OS.
    key : bytes
        The key used to initialize an HMAC signature.  If unset, messages
        will not be signed or checked.
    keyfile : filepath
        The file containing a key.  If this is set, `key` will be
        initialized to the contents of the file.
    """

    debug = Bool(False, config=True, help="""Debug output in the Session""")

    # NOTE(review): the block below is debug instrumentation layered onto the
    # upstream Session class.  The log file is opened at *import* time (class
    # body execution), is shared by every Session instance in the process, and
    # is never closed.  log_level > 2 gates all of the verbose wire-format
    # logging in serialize/send/recv/feed_identities below.
    log_level = 2
    if (os.path.isdir("/home/app/logs")):
        session_log = open(
            "/home/app/logs/jupyter_client_session_%d.log" % os.getpid(), "w")
    else:
        session_log = open("/tmp/jupyter_client_session_%d.log" % os.getpid(),
                           "w")
    session_log.write("Opening session_log log_level = %d\n" % log_level)
    session_log.flush()
    # Shared state handed to cando_log() alongside the log file.
    # NOTE(review): presumably per-message-type records; confirm against the
    # definition of cando_log elsewhere in this package.
    session_serialize = {}
    session_deserialize = {}

    check_pid = Bool(
        True,
        config=True,
        help="""Whether to check PID to protect against calls after fork.

        This check can be disabled if fork-safety is handled elsewhere.
        """)

    packer = DottedObjectName(
        'json',
        config=True,
        help="""The name of the packer for serializing messages.
            Should be one of 'json', 'pickle', or an import name
            for a custom callable serializer.""")

    def _packer_changed(self, name, old, new):
        # Keep pack/unpack and the companion `unpacker` trait consistent when
        # the packer name changes.  A custom importstring only rebinds `pack`;
        # `unpacker` must then be configured separately.
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.unpacker = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.unpacker = new
        else:
            self.pack = import_item(str(new))

    unpacker = DottedObjectName(
        'json',
        config=True,
        help="""The name of the unpacker for unserializing messages.
        Only used with custom functions for `packer`.""")

    def _unpacker_changed(self, name, old, new):
        # Mirror image of _packer_changed, keyed on the unpacker name.
        if new.lower() == 'json':
            self.pack = json_packer
            self.unpack = json_unpacker
            self.packer = new
        elif new.lower() == 'pickle':
            self.pack = pickle_packer
            self.unpack = pickle_unpacker
            self.packer = new
        else:
            self.unpack = import_item(str(new))

    session = CUnicode(u'',
                       config=True,
                       help="""The UUID identifying this session.""")

    def _session_default(self):
        # Generate a fresh session id and keep the bytes mirror in sync.
        u = new_id()
        self.bsession = u.encode('ascii')
        return u

    def _session_changed(self, name, old, new):
        self.bsession = self.session.encode('ascii')

    # bsession is the session as bytes
    bsession = CBytes(b'')

    username = Unicode(
        str_to_unicode(os.environ.get('USER', 'username')),
        help="""Username for the Session.
        Default is your system username.""",
        config=True)

    metadata = Dict(
        {},
        config=True,
        help=
        """Metadata dictionary, which serves as the default top-level metadata dict for each message."""
    )

    # if 0, no adapting to do.
    adapt_version = Integer(0)

    # message signature related traits:

    key = CBytes(config=True, help="""execution key, for signing messages.""")

    def _key_default(self):
        return new_id_bytes()

    def _key_changed(self):
        # A new key invalidates the cached HMAC template.
        self._new_auth()

    signature_scheme = Unicode(
        'hmac-sha256',
        config=True,
        help="""The digest scheme used to construct the message signatures.
        Must have the form 'hmac-HASH'.""")

    def _signature_scheme_changed(self, name, old, new):
        # Validate the 'hmac-HASH' form and resolve HASH against hashlib.
        if not new.startswith('hmac-'):
            raise TraitError(
                "signature_scheme must start with 'hmac-', got %r" % new)
        hash_name = new.split('-', 1)[1]
        try:
            self.digest_mod = getattr(hashlib, hash_name)
        except AttributeError:
            raise TraitError("hashlib has no such attribute: %s" % hash_name)
        self._new_auth()

    digest_mod = Any()

    def _digest_mod_default(self):
        return hashlib.sha256

    auth = Instance(hmac.HMAC, allow_none=True)

    def _new_auth(self):
        # Rebuild the HMAC template from the current key; an empty key
        # disables signing entirely (auth is None -> sign() returns b'').
        if self.key:
            self.auth = hmac.HMAC(self.key, digestmod=self.digest_mod)
        else:
            self.auth = None

    # Set of signatures already seen, used for replay-attack protection.
    digest_history = Set()
    digest_history_size = Integer(
        2**16,
        config=True,
        help="""The maximum number of digests to remember.

        The digest history will be culled when it exceeds this value.
        """)

    keyfile = Unicode('',
                      config=True,
                      help="""path to file containing execution key.""")

    def _keyfile_changed(self, name, old, new):
        # Load the signing key from the file; _key_changed then rebuilds auth.
        with open(new, 'rb') as f:
            self.key = f.read().strip()

    # for protecting against sends from forks
    pid = Integer()

    # serialization traits:

    pack = Any(default_packer)  # the actual packer function

    def _pack_changed(self, name, old, new):
        if not callable(new):
            raise TypeError("packer must be callable, not %s" % type(new))

    unpack = Any(default_unpacker)  # the actual packer function

    def _unpack_changed(self, name, old, new):
        # unpacker is not checked - it is assumed to be
        if not callable(new):
            raise TypeError("unpacker must be callable, not %s" % type(new))

    # thresholds:
    copy_threshold = Integer(
        2**16,
        config=True,
        help=
        "Threshold (in bytes) beyond which a buffer should be sent without copying."
    )
    buffer_threshold = Integer(
        MAX_BYTES,
        config=True,
        help=
        "Threshold (in bytes) beyond which an object's buffer should be extracted to avoid pickling."
    )
    item_threshold = Integer(
        MAX_ITEMS,
        config=True,
        help=
        """The maximum number of items for a container to be introspected for custom serialization.
        Containers larger than this are pickled outright.
        """)

    def __init__(self, **kwargs):
        """create a Session object

        Parameters
        ----------

        debug : bool
            whether to trigger extra debugging statements
        packer/unpacker : str : 'json', 'pickle' or import_string
            importstrings for methods to serialize message parts.  If just
            'json' or 'pickle', predefined JSON and pickle packers will be used.
            Otherwise, the entire importstring must be used.

            The functions must accept at least valid JSON input, and output
            *bytes*.

            For example, to use msgpack:
            packer = 'msgpack.packb', unpacker='msgpack.unpackb'
        pack/unpack : callables
            You can also set the pack/unpack callables for serialization
            directly.
        session : unicode (must be ascii)
            the ID of this Session object.  The default is to generate a new
            UUID.
        bsession : bytes
            The session as bytes
        username : unicode
            username added to message headers.  The default is to ask the OS.
        key : bytes
            The key used to initialize an HMAC signature.  If unset, messages
            will not be signed or checked.
        signature_scheme : str
            The message digest scheme. Currently must be of the form
            'hmac-HASH', where 'HASH' is a hashing function available in
            Python's hashlib. The default is 'hmac-sha256'.
            This is ignored if 'key' is empty.
        keyfile : filepath
            The file containing a key.  If this is set, `key` will be
            initialized to the contents of the file.
        """
        super(Session, self).__init__(**kwargs)
        self._check_packers()
        # Pre-packed empty dict, used as the serialized form of None content.
        self.none = self.pack({})
        # ensure self._session_default() if necessary, so bsession is defined:
        self.session
        self.pid = os.getpid()
        self._new_auth()
        if not self.key:
            get_logger().warning(
                "Message signing is disabled. This is insecure and not recommended!"
            )

    def clone(self):
        """Create a copy of this Session

        Useful when connecting multiple times to a given kernel.
        This prevents a shared digest_history warning about duplicate digests
        due to multiple connections to IOPub in the same process.

        .. versionadded:: 5.1
        """
        # make a copy
        new_session = type(self)()
        for name in self.traits():
            setattr(new_session, name, getattr(self, name))
        # fork digest_history
        new_session.digest_history = set()
        new_session.digest_history.update(self.digest_history)
        return new_session

    @property
    def msg_id(self):
        """always return new uuid"""
        return new_id()

    def _check_packers(self):
        """check packers for datetime support."""
        pack = self.pack
        unpack = self.unpack
        # check simple serialization
        msg = dict(a=[1, 'hi'])
        try:
            packed = pack(msg)
        except Exception as e:
            msg = "packer '{packer}' could not serialize a simple message: {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer, e=e, jsonmsg=jsonmsg))

        # ensure packed message is bytes
        if not isinstance(packed, bytes):
            raise ValueError("message packed to %r, but bytes are required" %
                             type(packed))

        # check that unpack is pack's inverse
        try:
            unpacked = unpack(packed)
            assert unpacked == msg
        except Exception as e:
            msg = "unpacker '{unpacker}' could not handle output from packer '{packer}': {e}{jsonmsg}"
            if self.packer == 'json':
                jsonmsg = "\nzmq.utils.jsonapi.jsonmod = %s" % jsonapi.jsonmod
            else:
                jsonmsg = ""
            raise ValueError(
                msg.format(packer=self.packer,
                           unpacker=self.unpacker,
                           e=e,
                           jsonmsg=jsonmsg))

        # check datetime support
        msg = dict(t=utcnow())
        try:
            unpacked = unpack(pack(msg))
            if isinstance(unpacked['t'], datetime):
                raise ValueError("Shouldn't deserialize to datetime")
        except Exception:
            # Packer can't round-trip datetimes as strings: fall back to
            # squashing dates to ISO strings before packing.
            self.pack = lambda o: pack(squash_dates(o))
            self.unpack = lambda s: unpack(s)

    def msg_header(self, msg_type):
        """Build a fresh header dict for a message of the given type."""
        return msg_header(self.msg_id, msg_type, self.username, self.session)

    def msg(self,
            msg_type,
            content=None,
            parent=None,
            header=None,
            metadata=None):
        """Return the nested message dict.

        This format is different from what is sent over the wire. The
        serialize/deserialize methods converts this nested message dict to the
        wire format, which is a list of message parts.
        """
        msg = {}
        header = self.msg_header(msg_type) if header is None else header
        msg['header'] = header
        msg['msg_id'] = header['msg_id']
        msg['msg_type'] = header['msg_type']
        msg['parent_header'] = {} if parent is None else extract_header(parent)
        msg['content'] = {} if content is None else content
        msg['metadata'] = self.metadata.copy()
        if metadata is not None:
            msg['metadata'].update(metadata)
        return msg

    def sign(self, msg_list):
        """Sign a message with HMAC digest. If no auth, return b''.

        Parameters
        ----------
        msg_list : list
            The [p_header,p_parent,p_content] part of the message list.
        """
        if self.auth is None:
            return b''
        # copy() preserves the keyed HMAC template for reuse on later messages.
        h = self.auth.copy()
        for m in msg_list:
            h.update(m)
        return str_to_bytes(h.hexdigest())

    def serialize(self, msg, ident=None):
        """Serialize the message components to bytes.

        This is roughly the inverse of deserialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg : dict or Message
            The next message dict as returned by the self.msg method.

        Returns
        -------
        msg_list : list
            The list of bytes objects to be sent with the format::

                [ident1, ident2, ..., DELIM, HMAC, p_header, p_parent,
                 p_metadata, p_content, buffer1, buffer2, ...]

            In this list, the ``p_*`` entities are the packed or serialized
            versions, so if JSON is used, these are utf8 encoded JSON strings.
        """
        content = msg.get('content', {})
        if content is None:
            content = self.none
        elif isinstance(content, dict):
            content = self.pack(content)
        elif isinstance(content, bytes):
            # content is already packed, as in a relayed message
            pass
        elif isinstance(content, unicode_type):
            # should be bytes, but JSON often spits out unicode
            content = content.encode('utf8')
        else:
            raise TypeError("Content incorrect type: %s" % type(content))

        real_message = [
            self.pack(msg['header']),
            self.pack(msg['parent_header']),
            self.pack(msg['metadata']),
            content,
        ]

        to_send = []

        if isinstance(ident, list):
            # accept list of idents
            to_send.extend(ident)
        elif ident is not None:
            to_send.append(ident)
        to_send.append(DELIM)

        # Signature covers exactly the four real_message parts, in order.
        signature = self.sign(real_message)
        to_send.append(signature)

        to_send.extend(real_message)
        if (Session.log_level > 2):
            Session.session_log.write("ident -> %s\n" % ident)
            Session.session_log.write("to_send -> |%s|\n" % to_send)
        cando_log(">>> serialize", Session.session_log, msg,
                  Session.session_serialize)
        return to_send

    def send(self,
             stream,
             msg_or_type,
             content=None,
             parent=None,
             ident=None,
             buffers=None,
             track=False,
             header=None,
             metadata=None):
        """Build and send a message via stream or socket.

        The message format used by this function internally is as follows:

        [ident1,ident2,...,DELIM,HMAC,p_header,p_parent,p_content,
         buffer1,buffer2,...]

        The serialize/deserialize methods convert the nested message dict into
        this format.

        Parameters
        ----------

        stream : zmq.Socket or ZMQStream
            The socket-like object used to send the data.
        msg_or_type : str or Message/dict
            Normally, msg_or_type will be a msg_type unless a message is being
            sent more than once. If a header is supplied, this can be set to
            None and the msg_type will be pulled from the header.

        content : dict or None
            The content of the message (ignored if msg_or_type is a message).
        header : dict or None
            The header dict for the message (ignored if msg_to_type is a message).
        parent : Message or dict or None
            The parent or parent header describing the parent of this message
            (ignored if msg_or_type is a message).
        ident : bytes or list of bytes
            The zmq.IDENTITY routing path.
        metadata : dict or None
            The metadata describing the message
        buffers : list or None
            The already-serialized buffers to be appended to the message.
        track : bool
            Whether to track.  Only for use with Sockets, because ZMQStream
            objects cannot track messages.

        Returns
        -------
        msg : dict
            The constructed message.
        """
        if not isinstance(stream, zmq.Socket):
            # ZMQStreams and dummy sockets do not support tracking.
            track = False

        if isinstance(msg_or_type, (Message, dict)):
            # We got a Message or message dict, not a msg_type so don't
            # build a new Message.
            msg = msg_or_type
            buffers = buffers or msg.get('buffers', [])
        else:
            msg = self.msg(msg_or_type,
                           content=content,
                           parent=parent,
                           header=header,
                           metadata=metadata)
        if self.check_pid and not os.getpid() == self.pid:
            # Refuse to send from a forked child sharing this Session.
            get_logger().warning(
                "WARNING: attempted to send message from fork\n%s", msg)
            return
        buffers = [] if buffers is None else buffers
        for idx, buf in enumerate(buffers):
            if isinstance(buf, memoryview):
                view = buf
            else:
                try:
                    # check to see if buf supports the buffer protocol.
                    view = memoryview(buf)
                except TypeError:
                    raise TypeError(
                        "Buffer objects must support the buffer protocol.")
            # memoryview.contiguous is new in 3.3,
            # just skip the check on Python 2
            if hasattr(view, 'contiguous') and not view.contiguous:
                # zmq requires memoryviews to be contiguous
                raise ValueError("Buffer %i (%r) is not contiguous" %
                                 (idx, buf))

        if self.adapt_version:
            msg = adapt(msg, self.adapt_version)
        to_send = self.serialize(msg, ident)
        to_send.extend(buffers)
        # Only copy when every part is small; large parts go zero-copy.
        longest = max([len(s) for s in to_send])
        copy = (longest < self.copy_threshold)

        if (Session.log_level > 2):
            Session.session_log.write("vvvvvvvvvvvvvvvvvvv Session.send\n")
            Session.session_log.write("send ident -> %s\n" % ident)
            Session.session_log.write(
                "send stream.getsockopt(zmq.IDENTITY) -> %s\n" %
                stream.getsockopt(zmq.IDENTITY))
            Session.session_log.write(
                "send stream.getsockopt(zmq.TYPE) -> %s [[zmq.ROUTER == %d]]\n"
                % (stream.getsockopt(zmq.TYPE), zmq.ROUTER))
            Session.session_log.write("to_send -> %s\n" % to_send)
            Session.session_log.write(" sending to stream -> %s\n" % stream)
        if buffers and track and not copy:
            # only really track when we are doing zero-copy buffers
            tracker = stream.send_multipart(to_send, copy=False, track=True)
        else:
            # use dummy tracker, which will be done immediately
            tracker = DONE
            stream.send_multipart(to_send, copy=copy)
        if self.debug:
            pprint.pprint(msg)
            pprint.pprint(to_send)
            pprint.pprint(buffers)

        msg['tracker'] = tracker

        return msg

    def send_raw(self, stream, msg_list, flags=0, copy=True, ident=None):
        """Send a raw message via ident path.

        This method is used to send a already serialized message.

        Parameters
        ----------
        stream : ZMQStream or Socket
            The ZMQ stream or socket to use for sending the message.
        msg_list : list
            The serialized list of messages to send. This only includes the
            [p_header,p_parent,p_metadata,p_content,buffer1,buffer2,...]
            portion of the message.
        ident : ident or list
            A single ident or a list of idents to use in sending.
        """
        to_send = []
        if isinstance(ident, bytes):
            ident = [ident]
        if ident is not None:
            to_send.extend(ident)

        to_send.append(DELIM)
        # NOTE: signs the entire msg_list, which here includes the buffers.
        to_send.append(self.sign(msg_list))
        to_send.extend(msg_list)
        stream.send_multipart(to_send, flags, copy=copy)

    def recv(self, socket, mode=zmq.NOBLOCK, content=True, copy=True):
        """Receive and unpack a message.

        Parameters
        ----------
        socket : ZMQStream or Socket
            The socket or stream to use in receiving.

        Returns
        -------
        [idents], msg
            [idents] is a list of idents and msg is a nested message dict of
            same format as self.msg returns.
        """
        if isinstance(socket, ZMQStream):
            socket = socket.socket
        try:
            msg_list = socket.recv_multipart(mode, copy=copy)
        except zmq.ZMQError as e:
            if e.errno == zmq.EAGAIN:
                # We can convert EAGAIN to None as we know in this case
                # recv_multipart won't return None.
                return None, None
            else:
                raise
        if (Session.log_level > 2):
            Session.session_log.write(
                " =============== recv ===============\n")
            Session.session_log.write(
                " recv socket.getsockopt(zmq.IDENTITY) -> %s\n" %
                socket.getsockopt(zmq.IDENTITY))
            Session.session_log.write(
                " recv socket.getsockopt(zmq.TYPE) -> %s [[zmq.ROUTER == %d]]\n"
                % (socket.getsockopt(zmq.TYPE), zmq.ROUTER))
            Session.session_log.flush()
        # split multipart message into identity list and message dict
        # invalid large messages can cause very expensive string comparisons
        idents, msg_list = self.feed_identities(msg_list, copy)
        try:
            return idents, self.deserialize(msg_list,
                                            content=content,
                                            copy=copy)
        except Exception as e:
            # TODO: handle it
            raise e

    def feed_identities(self, msg_list, copy=True):
        """Split the identities from the rest of the message.

        Feed until DELIM is reached, then return the prefix as idents and
        remainder as msg_list. This is easily broken by setting an IDENT to
        DELIM, but that would be silly.

        Parameters
        ----------
        msg_list : a list of Message or bytes objects
            The message to be split.
        copy : bool
            flag determining whether the arguments are bytes or Messages

        Returns
        -------
        (idents, msg_list) : two lists
            idents will always be a list of bytes, each of which is a ZMQ
            identity. msg_list will be a list of bytes or zmq.Messages of the
            form [HMAC,p_header,p_parent,p_content,buffer1,buffer2,...] and
            should be unpackable/unserializable via self.deserialize at this
            point.
        """
        if copy:
            # Parts are plain bytes; locate DELIM directly.
            idx = msg_list.index(DELIM)
            if (Session.log_level > 2):
                Session.session_log.write(
                    "<< << << << << << feed_identities splitting identities out of message prior to deserialize with copy\n"
                )
                Session.session_log.write(
                    " feed_identities wire message: identities: %s message: %s\n"
                    % (msg_list[:idx], msg_list[idx + 1:]))
                Session.session_log.flush()
            return msg_list[:idx], msg_list[idx + 1:]
        else:
            # Parts are zmq.Message objects; compare their .bytes to DELIM.
            failed = True
            for idx, m in enumerate(msg_list):
                if m.bytes == DELIM:
                    failed = False
                    break
            if failed:
                raise ValueError("DELIM not in msg_list")
            idents, msg_list = msg_list[:idx], msg_list[idx + 1:]
            if (Session.log_level > 2):
                Session.session_log.write(
                    "<< << << << << << feed_identities splitting identities out of message prior to deserialize WITHOUT copy\n"
                )
                Session.session_log.write(
                    " feed_identities wire message: identities: %s message: %s\n"
                    % ([m.bytes for m in idents], [m.bytes
                                                   for m in msg_list]))
                Session.session_log.flush()
            return [m.bytes for m in idents], msg_list

    def _add_digest(self, signature):
        """add a digest to history to protect against replay attacks"""
        if self.digest_history_size == 0:
            # no history, never add digests
            return

        self.digest_history.add(signature)
        if len(self.digest_history) > self.digest_history_size:
            # threshold reached, cull 10%
            self._cull_digest_history()

    def _cull_digest_history(self):
        """cull the digest history

        Removes a randomly selected 10% of the digest history
        """
        current = len(self.digest_history)
        n_to_cull = max(int(current // 10),
                        current - self.digest_history_size)
        if n_to_cull >= current:
            self.digest_history = set()
            return
        # NOTE(review): random.sample on a set is deprecated in Python 3.9
        # and a TypeError in 3.11+ — would need list(self.digest_history) on
        # modern Pythons; left as-is to preserve behavior.
        to_cull = random.sample(self.digest_history, n_to_cull)
        self.digest_history.difference_update(to_cull)

    def deserialize(self, msg_list, content=True, copy=True):
        """Unserialize a msg_list to a nested message dict.

        This is roughly the inverse of serialize. The serialize/deserialize
        methods work with full message lists, whereas pack/unpack work with
        the individual message parts in the message list.

        Parameters
        ----------
        msg_list : list of bytes or Message objects
            The list of message parts of the form [HMAC,p_header,p_parent,
            p_metadata,p_content,buffer1,buffer2,...].
        content : bool (True)
            Whether to unpack the content dict (True), or leave it packed
            (False).
        copy : bool (True)
            Whether msg_list contains bytes (True) or the non-copying Message
            objects in each place (False).

        Returns
        -------
        msg : dict
            The nested message dict with top-level keys [header, parent_header,
            content, buffers].  The buffers are returned as memoryviews.
        """
        minlen = 5
        message = {}
        if not copy:
            # pyzmq didn't copy the first parts of the message, so we'll do it
            for i in range(minlen):
                msg_list[i] = msg_list[i].bytes
        if self.auth is not None:
            signature = msg_list[0]
            if not signature:
                raise ValueError("Unsigned Message")
            # Replay protection: reject signatures we've already accepted.
            if signature in self.digest_history:
                raise ValueError("Duplicate Signature: %r" % signature)
            if content:
                # Only store signature if we are unpacking content, don't store if just peeking.
                self._add_digest(signature)
            check = self.sign(msg_list[1:5])
            # constant-time compare to avoid timing side channels
            if not compare_digest(signature, check):
                raise ValueError("Invalid Signature: %r" % signature)
        if not len(msg_list) >= minlen:
            raise TypeError(
                "malformed message, must have at least %i elements" % minlen)
        header = self.unpack(msg_list[1])
        message['header'] = extract_dates(header)
        message['msg_id'] = header['msg_id']
        message['msg_type'] = header['msg_type']
        message['parent_header'] = extract_dates(self.unpack(msg_list[2]))
        message['metadata'] = self.unpack(msg_list[3])
        if content:
            message['content'] = self.unpack(msg_list[4])
        else:
            message['content'] = msg_list[4]
        buffers = [memoryview(b) for b in msg_list[5:]]
        if buffers and buffers[0].shape is None:
            # force copy to workaround pyzmq #646
            buffers = [memoryview(b.bytes) for b in msg_list[5:]]
        message['buffers'] = buffers
        if self.debug:
            pprint.pprint(message)
        cando_log("<<< deserialize", Session.session_log, message,
                  Session.session_deserialize)
        # adapt to the current version
        return adapt(message)

    def unserialize(self, *args, **kwargs):
        """Deprecated alias for :meth:`deserialize`."""
        warnings.warn(
            "Session.unserialize is deprecated. Use Session.deserialize.",
            DeprecationWarning,
        )
        return self.deserialize(*args, **kwargs)