def output_defaults_module(filename, defaults):
    """Write a CoffeeScript module exposing the given model defaults.

    Serializes ``defaults`` (a dict mapping view-model names to their
    default property values) as JSON and embeds it in a small CoffeeScript
    module that exposes ``get_defaults(name)`` and ``all_view_model_names()``.

    Args:
        filename (str) : path of the ``.coffee`` file to write; parent
            directories are created if missing
        defaults (dict) : mapping of view-model name to defaults dict

    Raises:
        OSError : if the output directory cannot be created for a reason
            other than already existing
    """
    output = serialize_json(defaults, sort_keys=True, indent=4, separators=[",", ":"])
    coffee_template = """
all_defaults = %s;

get_defaults = (name) ->
  if name of all_defaults
    all_defaults[name]
  else
    null

all_view_model_names = () ->
  Object.keys(all_defaults)

module.exports = {
  get_defaults: get_defaults
  all_view_model_names: all_view_model_names
}
"""
    directory = os.path.dirname(filename)
    # only attempt makedirs when there is a directory component;
    # os.makedirs("") raises unconditionally
    if directory:
        try:
            os.makedirs(directory)
        except OSError:
            # tolerate "already exists" but surface real failures
            # (permissions, read-only filesystem, ...) instead of
            # silently swallowing them as the old code did
            if not os.path.isdir(directory):
                raise
    # with-statement guarantees the handle is closed even if write fails
    with codecs.open(filename, "w", "utf-8") as f:
        f.write(coffee_template % output)
    print("Wrote %s with %d model classes" % (filename, len(defaults)))
def create_json_patch_string(self, events):
    ''' Create a JSON string describing a patch to be applied with
    apply_json_patch_string()

    Args:
        events : list of events to be translated into patches

    Returns:
        str : JSON string which can be applied to make the given updates to obj

    Raises:
        ValueError : if any event belongs to a different document
    '''
    collected_refs = set()
    serialized_events = []
    for event in events:
        # every event must originate from this document
        if event.document is not self:
            raise ValueError("Cannot create a patch using events from a different document " + repr(event))

        if isinstance(event, ModelChangedEvent):
            new_value = event.new
            # The new value may contain references the remote document has
            # never seen, and may itself be absent from the remote doc. The
            # remote might already hold some of these references, but there
            # is no cheap way to know without snapshotting _all_models
            # BEFORE the attribute changed. So send every reference the
            # remote could possibly need, accepting some redundancy; this
            # could be tightened later at the cost of extra bookkeeping.
            new_value_refs = set(Model.collect_models(new_value))
            # never resend a full copy of the object being patched,
            # unless it happens to also be the new value itself
            if event.model != new_value:
                new_value_refs.discard(event.model)
            collected_refs.update(new_value_refs)
            serialized_events.append({
                'kind'  : 'ModelChanged',
                'model' : event.model.ref,
                'attr'  : event.attr,
                'new'   : new_value,
            })
        elif isinstance(event, RootAddedEvent):
            # a new root brings its entire reference graph along
            collected_refs.update(event.model.references())
            serialized_events.append({
                'kind'  : 'RootAdded',
                'model' : event.model.ref,
            })
        elif isinstance(event, RootRemovedEvent):
            serialized_events.append({
                'kind'  : 'RootRemoved',
                'model' : event.model.ref,
            })
        elif isinstance(event, TitleChangedEvent):
            serialized_events.append({
                'kind'  : 'TitleChanged',
                'title' : event.title,
            })

    patch = {
        'events'     : serialized_events,
        'references' : self._references_json(collected_refs),
    }
    return serialize_json(patch)
def create_json_patch_string(self, events):
    ''' Create a JSON string describing a patch to be applied with
    apply_json_patch_string()

    Args:
        events : list of events to be translated into patches

    Returns:
        str : JSON string which can be applied to make the given updates to obj

    Raises:
        ValueError : if any event belongs to a different document
    '''
    collected_refs = set()
    serialized_events = []
    for event in events:
        # every event must originate from this document
        if event.document is not self:
            raise ValueError("Cannot create a patch using events from a different document " + repr(event))

        if isinstance(event, ModelChangedEvent):
            new_value = event.new
            # The new value may contain references the remote document has
            # never seen, and may itself be absent from the remote doc. The
            # remote might already hold some of these references, but there
            # is no cheap way to know without snapshotting _all_models
            # BEFORE the attribute changed. So send every reference the
            # remote could possibly need, accepting some redundancy; this
            # could be tightened later at the cost of extra bookkeeping.
            new_value_refs = set(PlotObject.collect_plot_objects(new_value))
            # never resend a full copy of the object being patched,
            # unless it happens to also be the new value itself
            if event.model != new_value:
                new_value_refs.discard(event.model)
            collected_refs.update(new_value_refs)
            serialized_events.append({
                'kind'  : 'ModelChanged',
                'model' : event.model.ref,
                'attr'  : event.attr,
                'new'   : new_value,
            })
        elif isinstance(event, RootAddedEvent):
            # a new root brings its entire reference graph along
            collected_refs.update(event.model.references())
            serialized_events.append({
                'kind'  : 'RootAdded',
                'model' : event.model.ref,
            })
        elif isinstance(event, RootRemovedEvent):
            serialized_events.append({
                'kind'  : 'RootRemoved',
                'model' : event.model.ref,
            })

    patch = {
        'events'     : serialized_events,
        'references' : self._references_json(collected_refs),
    }
    return serialize_json(patch)
def to_json_string(self):
    ''' Convert the document to a JSON string.

    Returns:
        str : JSON representation containing the root model ids and the
            serialized references for every model in the document
    '''
    doc_json = {
        'roots' : {
            # ids of the top-level models, in root order
            'root_ids'   : [root._id for root in self._roots],
            # every model in the document, serialized as references
            'references' : self._references_json(self._all_models.values()),
        }
    }
    return serialize_json(doc_json)
def to_json_string(self):
    ''' Convert the document to a JSON string.

    Returns:
        str : JSON representation containing the document title, the root
            model ids, and the serialized references for every model in
            the document
    '''
    doc_json = {
        'title' : self.title,
        'roots' : {
            # ids of the top-level models, in root order
            'root_ids'   : [root._id for root in self._roots],
            # every model in the document, serialized as references
            'references' : self._references_json(self._all_models.values()),
        }
    }
    return serialize_json(doc_json)
def run(self):
    """Render reference documentation for a single Bokeh model.

    Resolves the dotted model path given as the directive's first
    argument, instantiates the model, serializes its default state to
    pretty-printed JSON, renders it through ``MODEL_TEMPLATE``, and
    parses the result as nested reST.

    Returns:
        list : docutils nodes produced by the nested parse

    Raises:
        SphinxError : if the module cannot be imported, the model name is
            not found in it, or the object fails the Viewable check
    """
    model_path = self.arguments[0]
    module_name, model_name = model_path.rsplit(".", 1)
    try:
        module = importlib.import_module(module_name)
    except ImportError:
        raise SphinxError(
            "Unable to generate reference docs for %s, couldn't import module '%s'" % (model_path, module_name))
    model = getattr(module, model_name, None)
    if model is None:
        raise SphinxError(
            "Unable to generate reference docs for %s, no model '%s' in %s" % (model_path, model_name, module_name))
    # NOTE(review): this compares the *metaclass* of the model class to
    # Viewable, not issubclass(model, Viewable) as the error message
    # suggests — presumably Viewable is a metaclass here; confirm against
    # its definition before "fixing" this.
    if type(model) != Viewable:
        raise SphinxError(
            "Unable to generate reference docs for %s, model '%s' is not a subclass of Viewable" % (model_path, model_name))
    model_obj = model()
    # round-trip through json to get deterministic, pretty-printed output
    model_json = json.dumps(json.loads(
        serialize_json(model_obj.vm_serialize(False))),
        sort_keys=True, indent=2, separators=(',', ': '))
    rst_text = MODEL_TEMPLATE.render(
        model_path=model_path,
        model_json=model_json,
    )
    result = ViewList()
    for line in rst_text.split("\n"):
        result.append(line, "<bokeh-model>")
    node = nodes.paragraph()
    node.document = self.state.document
    nested_parse_with_titles(self.state, result, node)
    return node.children
def run(self):
    """Render reference documentation for a single Bokeh model.

    Resolves the dotted model path given as the directive's first
    argument, instantiates the model, serializes its default state to
    pretty-printed JSON, renders it through ``MODEL_TEMPLATE``, and
    parses the result as nested reST.

    Returns:
        list : docutils nodes produced by the nested parse

    Raises:
        SphinxError : if the module cannot be imported, the model name is
            not found in it, or the object fails the Viewable check
    """
    model_path = self.arguments[0]
    mod_name, cls_name = model_path.rsplit(".", 1)

    try:
        mod = importlib.import_module(mod_name)
    except ImportError:
        raise SphinxError("Unable to generate reference docs for %s, couldn't import module '%s'" % (model_path, mod_name))

    model = getattr(mod, cls_name, None)
    if model is None:
        raise SphinxError("Unable to generate reference docs for %s, no model '%s' in %s" % (model_path, cls_name, mod_name))
    if type(model) != Viewable:
        raise SphinxError("Unable to generate reference docs for %s, model '%s' is not a subclass of Viewable" % (model_path, cls_name))

    # round-trip through json to get deterministic, pretty-printed output
    serialized = serialize_json(model().vm_serialize(False))
    model_json = json.dumps(
        json.loads(serialized),
        sort_keys=True,
        indent=2,
        separators=(',', ': ')
    )

    rst_text = MODEL_TEMPLATE.render(
        model_path=model_path,
        model_json=model_json,
    )

    result = ViewList()
    for line in rst_text.split("\n"):
        result.append(line, "<bokeh-model>")

    node = nodes.paragraph()
    node.document = self.state.document
    nested_parse_with_titles(self.state, result, node)
    return node.children
    # NOTE(review): this return completes a recursive ``leaves`` helper whose
    # definition begins before this chunk — confirm against the full file.
    return [leaf] + leaves(tail, underneath)

# Collect the default property values for every model class reachable from
# model_class, keyed by the class's view-model name.
all_json = {}
for leaf in leaves(all_tree, model_class):
    klass = leaf[0]
    vm_name = klass.__view_model__
    # first class seen for a given view-model name wins
    if vm_name in all_json:
        continue
    defaults = {}
    # instantiate to read the actual default values off a live object
    instance = klass()
    for name, default in instance.properties_with_values().items():
        if isinstance(default, Model):
            # model-valued defaults are serialized as a ref with inlined
            # attributes so the consumer doesn't need the live object
            ref = default.ref
            raw_attrs = default._to_json_like(include_defaults=True)
            attrs = loads(serialize_json(raw_attrs, sort_keys=True))
            ref["attributes"] = attrs
            del ref["id"]  # there's no way the ID will match coffee
            default = ref
        elif isinstance(default, float) and default == float("inf"):
            # infinity is not representable in JSON; use null instead
            default = None
        defaults[name] = default
    all_json[vm_name] = defaults

# Split widget classes out of all_json into their own mapping.
widgets_json = {}
for leaf_widget in leaves(all_tree, widget_class):
    klass = leaf_widget[0]
    vm_name = klass.__view_model__
    if vm_name not in widgets_json:
        widgets_json[vm_name] = all_json[vm_name]
        del all_json[vm_name]