def multi_constructor_obj(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!obj:" tag is encountered.
    See PyYAML documentation for details on the call signature.

    Resolves ``tag_suffix`` to a callable (eval for bare names, import for
    dotted paths) and returns a lazily-evaluated Proxy around it together
    with the keyword mapping constructed from ``node``.

    Raises
    ------
    TypeError
        If any mapping key is not a string.
    """
    yaml_src = yaml.serialize(node)
    construct_mapping(node)
    mapping = loader.construct_mapping(node)
    assert hasattr(mapping, 'keys')
    assert hasattr(mapping, 'values')
    for key in mapping.keys():
        # Fixed: `basestring` does not exist on Python 3; six.string_types
        # (already used elsewhere in this file) covers str on Python 3 and
        # str/unicode on Python 2.
        if not isinstance(key, six.string_types):
            message = "Received non string object (%s) as " \
                      "key in mapping." % str(key)
            raise TypeError(message)
    if '.' not in tag_suffix:
        # TODO: I'm not sure how this was ever working without eval().
        # NOTE(review): eval() on YAML-supplied text is unsafe for
        # untrusted input.
        callable = eval(tag_suffix)
    else:
        callable = try_to_import(tag_suffix)
    rval = Proxy(callable=callable, yaml_src=yaml_src, positionals=(),
                 keywords=mapping)
    return rval
def _multi_constructor_obj(loader, tag_suffix, node):
    """
    PyYAML multi-constructor invoked for "!obj:" tags.

    Resolves ``tag_suffix`` to a callable (by evaluation when it contains
    no dots, by import otherwise) and wraps it, together with the keyword
    mapping built from ``node``, in a lazily-evaluated proxy.

    Notes
    -----
    Taken (with minor changes) from `Pylearn2`_.

    .. _Pylearn2: \
    http://github.com/lisa-lab/pylearn2/blob/master/pylearn2/config/yaml_parse.py
    """
    yaml_src = yaml.serialize(node)
    _construct_mapping(node)
    mapping = loader.construct_mapping(node)
    assert hasattr(mapping, 'keys')
    assert hasattr(mapping, 'values')
    for candidate in mapping.keys():
        if isinstance(candidate, six.string_types):
            continue
        raise TypeError(
            "Received non string object ({0}) as key in mapping.".format(
                str(candidate)))
    if '.' in tag_suffix:
        target = _try_to_import(tag_suffix)
    else:
        # I'm not sure how this was ever working without eval().
        target = eval(tag_suffix)
    return _Proxy(callable=target, yaml_src=yaml_src, positionals=(),
                  keywords=mapping)
def execute(path, content=None):
    """Parse the document at ``path`` and return its executed result as
    serialized YAML.

    If ``content`` is falsy, the file at ``path`` is read first.
    """
    if not content:
        with open(path) as handle:
            content = handle.read()
    document = parse(path, content, ParseContext())
    executed = document.execute()
    return yaml.serialize(executed, allow_unicode=True)
def multi_constructor(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to construct
    objects from argument descriptions. See PyYAML documentation for
    details on the call signature.

    Returns
    -------
    ObjectProxy
        Proxy wrapping the resolved class name/object and the keyword
        mapping constructed from ``node``.
    """
    yaml_src = yaml.serialize(node)
    mapping = loader.construct_mapping(node)
    if '.' not in tag_suffix:
        classname = tag_suffix
        rval = ObjectProxy(classname, mapping, yaml_src)
    else:
        components = tag_suffix.split('.')
        modulename = '.'.join(components[:-1])
        try:
            exec('import %s' % modulename)
        # Fixed: "except ImportError, e" is Python-2-only syntax; "as"
        # works on Python 2.6+ and Python 3.
        except ImportError as e:
            raise ImportError("Could not import " + modulename +
                              ". python wanted to phrase this as: " + str(e))
        try:
            classname = eval(tag_suffix)
        except AttributeError:
            raise AttributeError('Could not evaluate %s' % tag_suffix)
        rval = ObjectProxy(classname, mapping, yaml_src)
    # Fixed: the original built rval but never returned it, so PyYAML
    # always received None for this tag.
    return rval
def write(self):
    """Serialize the loaded YAML tree and persist it to ``self._name``."""
    self._lazy_load()
    serialized = yaml.serialize(self._root, stream=None,
                                Dumper=CourseBuilderYamlFormatter)
    with open(self._name, 'w') as out:
        out.write(serialized)
def multi_constructor_obj(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!obj:" tag is encountered.
    See PyYAML documentation for details on the call signature.

    Returns an ObjectProxy wrapping the class named by ``tag_suffix``
    (imported when the suffix is a dotted path) and the keyword mapping
    constructed from ``node``.

    Raises
    ------
    TypeError
        If any mapping key is not a string.
    """
    yaml_src = yaml.serialize(node)
    construct_mapping(node)
    mapping = loader.construct_mapping(node)
    assert hasattr(mapping, "keys")
    assert hasattr(mapping, "values")
    for key in mapping.keys():
        # Fixed: `basestring` does not exist on Python 3; six.string_types
        # covers str on Python 3 and str/unicode on Python 2.
        if not isinstance(key, six.string_types):
            message = "Received non string object (%s) as " "key in mapping." % str(key)
            raise TypeError(message)
    # Both branches built an identical ObjectProxy; only the class lookup
    # differs, so compute it once.
    if "." not in tag_suffix:
        classname = tag_suffix
    else:
        classname = try_to_import(tag_suffix)
    return ObjectProxy(classname, mapping, yaml_src)
def dump(data):
    """Represent ``data`` as a YAML node tree, post-process it, and return
    the serialized YAML text."""
    import yaml
    representer = yaml.representer.SafeRepresenter()
    tree = representer.represent_data(data)
    partial_resort(tree)
    escape_node_strings(tree)
    return yaml.serialize(tree)
def multi_constructor_obj(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!obj:" tag is encountered.
    See PyYAML documentation for details on the call signature.

    Returns an ObjectProxy wrapping the class named by ``tag_suffix`` and
    the keyword mapping constructed from ``node``.

    Raises
    ------
    TypeError
        If any mapping key is not a string.
    """
    yaml_src = yaml.serialize(node)
    construct_mapping(node)
    mapping = loader.construct_mapping(node)
    assert hasattr(mapping, 'keys')
    assert hasattr(mapping, 'values')
    for key in mapping.keys():
        # Fixed: `basestring` is Python-2-only; six.string_types works on
        # both major versions.
        if not isinstance(key, six.string_types):
            message = "Received non string object (%s) as " \
                      "key in mapping." % str(key)
            raise TypeError(message)
    # The two branches only differed in how the class is resolved; build
    # the proxy once.
    if '.' not in tag_suffix:
        classname = tag_suffix
    else:
        classname = try_to_import(tag_suffix)
    return ObjectProxy(classname, mapping, yaml_src)
def multi_constructor_obj(loader, tag_suffix, node):
    """
    Converts "key: !obj:python.path { }," to
    "key: { __obj__: python.path }".
    """
    yaml_src = yaml.serialize(node)
    result = loader.construct_mapping(node)
    result['__obj__'] = tag_suffix
    return result
def multi_constructor_import(loader, tag_suffix, node):
    """PyYAML callback for "!import:" tags: import the dotted path named
    by ``tag_suffix`` and return the imported object."""
    yaml_src = yaml.serialize(node)
    mapping = loader.construct_mapping(node)
    if '.' in tag_suffix:
        return try_to_import(tag_suffix)
    raise yaml.YAMLError("import tag suffix contains no '.'")
def constructor_int(loader, node):
    """
    Callback used by PyYAML when a "!int <str>" tag is encountered.
    This tag expects a (quoted) string as argument.
    """
    src = yaml.serialize(node)
    scalar = loader.construct_scalar(node)
    return Proxy(callable=delayed_constructor_int,
                 yaml_src=src,
                 positionals=(),
                 keywords={'value': scalar})
def constructor_setting(loader, node):
    """Callback used by PyYAML when a "!setting" tag is encountered.

    See PyYAML documentation for details on the call signature.
    """
    src = yaml.serialize(node)
    setting_name = loader.construct_scalar(node)
    return Proxy(callable=Setting,
                 keywords={"name": setting_name},
                 yaml_src=src)
def read_configuration(cls, config_file):
    """Read the YAML configuration file.

    Splits the top-level mapping into events and measurements (recognized
    by node tag) and returns each group re-serialized as its own YAML
    stream.

    Returns
    -------
    tuple of (str, str)
        ``(events_stream, measurements_stream)``.
    """
    # load YAML events/measurements definition
    # Fixed: use a context manager so the file is closed even if
    # yaml.compose raises (the bare open/close pair leaked the handle on
    # error).
    with open(config_file, 'r') as f:
        doc_yaml = yaml.compose(f)
    # split events & measurements definitions
    measurements, events = list(), list()
    for key, value in doc_yaml.value:
        if value.tag == Measurements.yaml_tag:
            measurements.append((key, value))
        if value.tag == Events.yaml_tag:
            events.append((key, value))
    measurements_yaml = yaml.MappingNode(u'tag:yaml.org,2002:map',
                                         measurements)
    measurements_stream = yaml.serialize(measurements_yaml)
    events_yaml = yaml.MappingNode(u'tag:yaml.org,2002:map', events)
    events_stream = yaml.serialize(events_yaml)
    # return event & measurements definition
    return events_stream, measurements_stream
def constructor_range(loader, node):
    """
    Callback used by PyYAML when a "!range <str>" tag is encountered.
    This tag expects a (quoted) string as argument.
    """
    src = yaml.serialize(node)
    multi_seq = MultiSeq.get_instance()
    spec = loader.construct_scalar(node)
    range_var = multi_seq.add_range_var(spec)
    return Proxy(callable=MultiSeq.get_value,
                 yaml_src=src,
                 positionals=(),
                 keywords={'rng_var': range_var})
def query_note_nodes(nodes, opts):
    """Build a DataFrame of notes from a composed YAML node tree and
    filter it with the user's pandas query expression.

    Parameters
    ----------
    nodes : yaml.MappingNode
        Top-level node tree produced by ``yaml.compose``; must contain a
        'notes' key.
    opts : object
        Options object with a ``query`` attribute (pandas query string,
        or falsy for no filtering).

    Returns
    -------
    tuple
        (filtered DataFrame, defaults mapping or None, full data dict,
        raw note nodes, list of extracted note dicts).
    """
    # NOTE(review): yaml.load without an explicit Loader is deprecated in
    # PyYAML >= 5.1 and unsafe on untrusted input -- confirm intended.
    data = yaml.load(yaml.serialize(nodes))
    defaults = data.get('defaults', None)
    top_map = {k.value: v.value for k, v in nodes.value}
    note_nodes = top_map['notes']
    notes = [extract_note(note_node, defaults) for note_node in note_nodes]
    notes_df = pandas.DataFrame(notes)
    if opts.query:
        query_results = notes_df.query(opts.query)
    else:
        query_results = notes_df
    return query_results, defaults, data, note_nodes, notes
def construct_include(loader: Loader, node: yaml.Node) -> Any:
    """Include file referenced at node."""
    if not isinstance(node, yaml.ScalarNode):
        serialized = yaml.serialize(node).strip()
        raise BadConfigIncludeError(
            f"Include tag value is not a scalar: {serialized}")
    include_file_path = str(loader.construct_scalar(node))
    full_path = os.path.abspath(
        os.path.join(loader.root, include_file_path))
    with open(full_path) as f:
        return yaml.load(f, Loader)
def replace_parameter_values(merged_resource_documents, parameters_to_replace):
    """Replace parameter values with values in parameters_to_replace.

    Serializes the node tree to text, substitutes each
    ``'[parameters(''name'')]'`` placeholder with its replacement value,
    and re-composes the result into a node tree.
    """
    yaml_string = yaml.serialize(merged_resource_documents)
    for name, replacement in parameters_to_replace.items():
        # Improved: compute the placeholder once (it was formatted twice
        # per parameter) and use the idiomatic `in` test instead of
        # str.find() >= 0.
        placeholder = "'[parameters(''{0}'')]'".format(name)
        if placeholder in yaml_string:
            yaml_string = yaml_string.replace(placeholder, replacement)
    yaml_dict = yaml.compose(yaml_string)
    return yaml_dict
def multi_constructor_pkl(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!pkl:" tag is encountered.
    """
    global additional_environ
    if tag_suffix != "" and tag_suffix != u"":
        raise AssertionError('Expected tag_suffix to be "" but it is "'
                             + tag_suffix +
                             '": Put space between !pkl: and the filename.')
    pkl_path = loader.construct_yaml_str(node)
    proxy = ObjectProxy(None, {}, yaml.serialize(node))
    proxy.instance = serial.load(preprocess(pkl_path, additional_environ))
    return proxy
def multi_constructor_obj(loader, tag_suffix, node):
    """Callback used by PyYAML when a "!obj:" tag is encountered.

    See PyYAML documentation for details on the call signature.
    """
    src = yaml.serialize(node)
    kwargs = loader.construct_mapping(node)
    if '.' in tag_suffix:
        target = try_to_import(tag_suffix)
    else:
        target = eval(tag_suffix)
    return Proxy(callable=target, keywords=kwargs, yaml_src=src)
def multi_constructor_obj(loader, tag_suffix, node):
    """Callback used by PyYAML when a "!obj:" tag is encountered.

    Resolves the tag suffix to a callable and defers instantiation via a
    Proxy. See PyYAML documentation for details on the call signature.
    """
    serialized = yaml.serialize(node)
    keyword_args = loader.construct_mapping(node)
    resolver = try_to_import if '.' in tag_suffix else eval
    return Proxy(callable=resolver(tag_suffix),
                 keywords=keyword_args,
                 yaml_src=serialized)
def test_dump_1(self):
    """ generate node object for 1 task """
    message = "This is a task"
    originals = dpcore.load_tasks(message)
    serialized = yaml.serialize(dpcore.yamlnodedump_tasks(originals[0]))
    reloaded = dpcore.load_tasks(yaml.load(serialized))
    self.assertEqual(reloaded[0].task, originals[0].task)
    self.assertEqual(reloaded[0].note, originals[0].note)
def multi_constructor_pkl(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!pkl:" tag is encountered.
    """
    global additional_environ
    if tag_suffix not in ("", u""):
        raise AssertionError('Expected tag_suffix to be "" but it is "'
                             + tag_suffix +
                             '": Put space between !pkl: and the filename.')
    pkl_path = loader.construct_yaml_str(node)
    loaded = serial.load(preprocess(pkl_path, additional_environ))
    return Proxy(callable=do_not_recurse,
                 positionals=(),
                 keywords={'value': loaded},
                 yaml_src=yaml.serialize(node))
def multi_constructor_pkl(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to load objects
    from paths to .pkl files. See PyYAML documentation for details on the
    call signature.
    """
    mapping = loader.construct_yaml_str(node)
    # Fixed: input validation via `assert` is stripped under python -O;
    # raise explicitly instead, matching the sibling "!pkl:" constructors
    # in this file that raise AssertionError with a message.
    if tag_suffix != "":
        raise AssertionError('Expected tag_suffix to be "" but it is "'
                             + tag_suffix + '"')
    rval = ObjectProxy(None, {}, yaml.serialize(node))
    rval.instance = serial.load(mapping)
    return rval
def multi_constructor_pkl(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to load objects
    from paths to .pkl files. See PyYAML documentation for details on the
    call signature.
    """
    mapping = loader.construct_yaml_str(node)
    # Fixed: `assert` is stripped under python -O; raise explicitly, as
    # the other "!pkl:" constructors in this file do.
    if tag_suffix != "":
        raise AssertionError('Expected tag_suffix to be "" but it is "'
                             + tag_suffix + '"')
    rval = ObjectProxy(None, {}, yaml.serialize(node))
    rval.instance = serial.load(mapping)
    return rval
def test_dump_1(self):
    """ generate node object for 1 story """
    message = "This is a story"
    originals = dpcore.load_stories(message)
    serialized = yaml.serialize(dpcore.yamlnodedump_stories(originals[0]))
    reloaded = dpcore.load_stories(yaml.load(serialized))
    self.assertEqual(reloaded[0].story, originals[0].story)
    self.assertEqual(reloaded[0].note, originals[0].note)
def multi_constructor_pkl(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to load objects
    from paths to .pkl files. See PyYAML documentation for details on the
    call signature.
    """
    pkl_path = loader.construct_yaml_str(node)
    if tag_suffix != "" and tag_suffix != u"":
        raise AssertionError('Expected tag_suffix to be "" but it is "'
                             + tag_suffix + '"')
    proxy = ObjectProxy(None, {}, yaml.serialize(node))
    proxy.instance = serial.load(pkl_path)
    return proxy
def test_dump_1(self):
    """ generate node object for 1 log """
    message = "This is a log record"
    originals = dpcore.load_logs(message)
    serialized = yaml.serialize(dpcore.yamlnodedump_logs(originals[0]))
    reloaded = dpcore.load_logs(yaml.load(serialized))
    self.assertEqual(reloaded[0].log, originals[0].log)
    self.assertEqual(reloaded[0].action, originals[0].action)
def multi_constructor_obj(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!obj:" tag is encountered.
    See PyYAML documentation for details on the call signature.
    """
    src = yaml.serialize(node)
    kwargs = loader.construct_mapping(node)
    if '.' in tag_suffix:
        classname = try_to_import(tag_suffix)
    else:
        classname = tag_suffix
    return ObjectProxy(classname, kwargs, src)
def multi_constructor(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to construct
    objects from argument descriptions. See PyYAML documentation for
    details on the call signature.
    """
    serialized = yaml.serialize(node)
    arguments = loader.construct_mapping(node)
    classname = (tag_suffix if '.' not in tag_suffix
                 else try_to_import(tag_suffix))
    return ObjectProxy(classname, arguments, serialized)
def multi_constructor_pkl(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to load objects
    from paths to .pkl files. See PyYAML documentation for details on the
    call signature.
    """
    pkl_path = loader.construct_yaml_str(node)
    if tag_suffix not in ("", u""):
        raise AssertionError('Expected tag_suffix to be "" but it is "'
                             + tag_suffix + '"')
    proxy = ObjectProxy(None, {}, yaml.serialize(node))
    proxy.instance = serial.load(pkl_path)
    return proxy
def test_dump_2(self):
    """ generate node object2 for 2 task """
    messages = ["This is log 1.", "this is log message 2."]
    originals = dpcore.load_logs(messages)
    seq_node = yaml.SequenceNode(tag=u"tag:yaml.org,2002:seq",
                                 value=dpcore.yamlnodedump_logs(originals),
                                 flow_style=False)
    reloaded = dpcore.load_logs(yaml.load(yaml.serialize(seq_node)))
    for idx in range(2):
        self.assertEqual(reloaded[idx].log, originals[idx].log)
        self.assertEqual(reloaded[idx].action, originals[idx].action)
def multi_constructor(loader, tag_suffix, node):
    """
    Constructor function passed to PyYAML telling it how to construct
    objects from argument descriptions. See PyYAML documentation for
    details on the call signature.

    Returns
    -------
    ObjectProxy
        Proxy wrapping the resolved class and its keyword mapping.
    """
    yaml_src = yaml.serialize(node)
    mapping = loader.construct_mapping(node)
    if '.' not in tag_suffix:
        classname = tag_suffix
        rval = ObjectProxy(classname, mapping, yaml_src)
    else:
        components = tag_suffix.split('.')
        modulename = '.'.join(components[:-1])
        try:
            exec('import %s' % modulename)
        # Fixed: "except ImportError, e" is Python-2-only syntax.
        except ImportError as e:
            # We know it's an ImportError, but is it an ImportError related
            # to this path, or did the module we're importing have an
            # unrelated ImportError? And yes, this test can still have
            # false positives; feel free to improve it.
            pieces = modulename.split('.')
            str_e = str(e)
            # Fixed: the containment test was inverted -- it searched for
            # the whole error message inside each short path component
            # (`piece.find(str(e))`), which essentially never matches, so
            # YAML-path problems were misreported. Look for each component
            # inside the message instead.
            found = any(piece in str_e for piece in pieces)
            if found:
                # The yaml file is probably to blame.
                # Report the problem with the full module path from the
                # YAML file.
                raise ImportError("Could not import %s; ImportError was %s"
                                  % (modulename, str_e))
            else:
                # The module being imported contains an error.
                # Pass the original exception on up, with the original
                # stack trace preserved.
                raise
        try:
            classname = eval(tag_suffix)
        except AttributeError:
            raise AttributeError('Could not evaluate %s' % tag_suffix)
        rval = ObjectProxy(classname, mapping, yaml_src)
    # Fixed: the proxy was built but never returned, so PyYAML received
    # None for every tag handled here.
    return rval
def multi_constructor_include(loader, tag_suffix, node):
    """
    Callback used by PyYAML when a "!include:" tag is encountered.
    See PyYAML documentation for details on the call signature.
    """
    yaml_src = yaml.serialize(node)
    construct_mapping(node)
    mapping = loader.construct_mapping(node)
    assert hasattr(mapping, 'keys')
    assert hasattr(mapping, 'values')
    for candidate in mapping.keys():
        if isinstance(candidate, six.string_types):
            continue
        raise TypeError("Received non string object (%s) as "
                        "key in mapping." % str(candidate))
    with open(tag_suffix, 'r') as f:
        filled_template = f.read() % (mapping)
    return yaml.load(filled_template)
def test_dump_2(self):
    """ generate node object2 for 2 task """
    source = ["This is task 1.",
              {"t": "This is task 2.",
               "sub-task": "This is a subtask.\nwhich have 2 lines.", }]
    originals = dpcore.load_tasks(source)
    self.assertEqual(1, len(originals[1].subtask))
    seq_node = yaml.SequenceNode(tag=u"tag:yaml.org,2002:seq",
                                 value=dpcore.yamlnodedump_tasks(originals),
                                 flow_style=False)
    reloaded = dpcore.load_tasks(yaml.load(yaml.serialize(seq_node)))
    for idx in range(2):
        self.assertEqual(reloaded[idx].task, originals[idx].task)
        self.assertEqual(reloaded[idx].note, originals[idx].note)
    self.assertEqual(1, len(reloaded[1].subtask))
def test_dump_2(self):
    """ generate node object2 for 2 story """
    source = ["This is story 1.",
              {"story": "This is story 2.",
               "sub-story": "This is a substory.\nwhich have 2 lines.",
               "task": "task 1.", }]
    originals = dpcore.load_stories(source)
    self.assertEqual(1, len(originals[1].substory))
    self.assertEqual(1, len(originals[1].subtask))
    seq_node = yaml.SequenceNode(tag=u"tag:yaml.org,2002:seq",
                                 value=dpcore.yamlnodedump_stories(originals),
                                 flow_style=False)
    reloaded = dpcore.load_stories(yaml.load(yaml.serialize(seq_node)))
    for idx in range(2):
        self.assertEqual(reloaded[idx].story, originals[idx].story)
        self.assertEqual(reloaded[idx].note, originals[idx].note)
    self.assertEqual(1, len(reloaded[1].substory))
    self.assertEqual(1, len(reloaded[1].subtask))
def extract_note(note_node, defaults):
    """Convert one YAML note node into a note dict, filling in defaults.

    Mutates ``note_node`` in place when the note has no 'id' yet (an id of
    0 is inserted at the front of the mapping node) and stores the node
    itself under the 'node' key so callers can write real IDs back to the
    YAML tree later.
    """
    # NOTE(review): yaml.load without an explicit Loader is deprecated in
    # PyYAML >= 5.1 -- confirm the intended Loader.
    note = yaml.load(yaml.serialize(note_node))
    if 'id' not in note:
        note['id'] = 0
        # We want to update the YAML file with a new note ID, but modifying a YAML
        # file is complicated. One way to do it is via a lower-level interface. Here
        # we use the nodes representation, which we modify directly. The nodes
        # representation can then be converted into a YAML file that fairly
        # faithfully resembles the original, but with the newly added note ID.
        # NOTE(review): the insert is assumed to apply only when the note had no
        # id (inserting unconditionally would duplicate the 'id' key) -- confirm.
        note_node.value.insert(0, (
            yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='id'),
            yaml.ScalarNode(tag='tag:yaml.org,2002:int', value=str(
                note['id'])),
        ))
    note.setdefault('deckName', defaults.get("deckName", "Default"))
    note.setdefault('modelName', defaults.get("modelName", "BasicMathJax"))
    note.setdefault('useMarkdown', defaults.get("useMarkdown", True))
    note.setdefault('markdownStyle', defaults.get("markdownStyle", "tango"))
    note.setdefault('markdownLineNums', defaults.get("markdownLineNums", False))
    note.setdefault('markdownTabLength', defaults.get("markdownTabLength", 4))
    note.setdefault('useMarkdownMathExt', defaults.get("useMarkdownMathExt",
                                                       True))
    # Merge default tags with the note's own, sorted, wrapped in commas.
    tags = defaults.get("extraTags", list()).copy()
    tags.extend(note.get('tags', list()))
    note['tags'] = ',{},'.format(','.join(sorted(tags)))
    # Default fields overlaid by the note's own fields.
    fields = dict(defaults.get("fields", dict()))
    fields.update(note.get("fields", dict()))
    note['fields'] = fields
    note['node'] = note_node
    return note
# Build, for every record after the first, a one-entry mapping node
# { d[2]: { type: d[1], value: d[3] } }, round-trip it through
# serialize/safe_load into plain data, and collect the results; then dump
# the collected documents to done/<shit>.yaml.
for d in vals[1:]:
    # # if d[2] == '@':
    # name_value = ''
    # else:
    # name_value = d[2]
    dont_eat_cheese_its_yuky = yaml.MappingNode(
        tag='tag:yaml.org,2002:map',
        value=[(
            yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=d[2]),
            # (yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=name_value),
            yaml.MappingNode(
                tag='tag:yaml.org,2002:map',
                value=[(yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='type'),
                        yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=d[1])),
                       (yaml.ScalarNode(tag='tag:yaml.org,2002:str', value='value'),
                        yaml.ScalarNode(tag='tag:yaml.org,2002:str', value=d[3]))]))])
    hate_you.append(
        yaml.safe_load(yaml.serialize(dont_eat_cheese_its_yuky)))

# NOTE(review): assumed to run once after the loop. Even if it originally
# ran inside the loop, 'w+' truncates on every open, so the final file
# contents are the same either way -- confirm placement.
with open(f'done/{shit}.yaml', 'w+') as file:
    documents = yaml.dump(hate_you, file, explicit_start=True,
                          width=float("inf"))
def print_pr(mapping):
    """Represent ``mapping`` as a YAML node tree, re-sort it, and print
    the serialized result (debug helper)."""
    import yaml
    r = yaml.SafeRepresenter()
    node = r.represent_data(mapping)
    partial_resort(node)
    # Fixed: the original serialized `mapping` (a plain dict) -- but
    # yaml.serialize expects a node, and serializing the dict would also
    # discard the resorted node built above. Serialize `node` instead.
    # (print(...) with a single argument is valid on both Python 2 and 3.)
    print(yaml.serialize(node))
def load_and_send_flashcards(self, filename, opts):
    """Send the notes in ``filename`` to Anki, then write back new IDs.

    Notes selected by ``opts.query`` are created (when their ID is missing
    or unknown to Anki) and then updated field-by-field and tag-by-tag.
    If any new notes were created, the YAML file is rewritten with the
    newly assigned note IDs.
    """
    with open(filename) as yaml_input_file:
        log.info("\nSending file '{}' to Anki...\n".format(filename))
        new_notes_were_created = False
        # The reason for the lower-level nodes representation of the YAML file is
        # that it can be used to make a modified version of the original YAML
        # file. We do this in two sections of code. In both cases, we add new note
        # IDs to the YAML file.
        nodes = yaml.compose(yaml_input_file)
        query_results, defaults, data, note_nodes, notes = query_note_nodes(
            nodes, opts)
        if query_results.empty:
            log.warning("Query returned no results.")
        else:
            log.debug("query_results:\n %s", str(query_results))
            # For each note_node in notes_node that matches query:
            for i in query_results.index:
                note_id = str(query_results.id[i])
                deck = query_results.deckName[i]
                model = query_results.modelName[i]
                use_md = query_results.useMarkdown[i]
                md_sty = query_results.markdownStyle[i]
                md_lineno = query_results.markdownLineNums[i]
                md_tablen = query_results.markdownTabLength[i]
                md_mathext = query_results.useMarkdownMathExt[i]
                tags = query_results.tags[i].replace(',', '\n').split()
                fields = query_results.fields[i]
                note_node = query_results.node[i]
                description = "{}:{}".format(note_id, fields)
                log.info("Processing note with ID: {}".format(note_id))
                # Check for note with given ID.
                # Get info for existing note.
                should_create_new_note = True
                must_replace_existing_note_id = False
                result = self.anki.notesInfo([note_id])
                if result.get("error", None) or not result['result'][0]:
                    log.info(
                        "Can't find note with ID %s; a new note will be created.",
                        note_id)
                    must_replace_existing_note_id = True
                else:
                    should_create_new_note = False
                if should_create_new_note:
                    # No provided ID; assume new note should be created.
                    log.debug("Creating new note...")
                    # Fields are rendered here with Markdown disabled; the
                    # converted fields are applied by the update step below
                    # once a note ID exists.
                    temporary_fields = {
                        k: self.format_text(
                            str(v),
                            False,
                            md_sty,
                            md_lineno,
                            md_tablen,
                            md_mathext,
                        )
                        for (k, v) in fields.items()
                    }
                    # Create, obtaining returned ID
                    result = self.anki.addNote(deck, model, temporary_fields,
                                               tags)
                    if result.get("error", None):
                        log.warning("Can't create note: %s", description)
                    else:
                        # Add ID to note_node
                        note_id = result['result']
                        if must_replace_existing_note_id:
                            prev_id = None
                            for k, v in note_node.value:
                                if k.value == 'id':
                                    prev_id, v.value = v.value, str(note_id)
                            if prev_id:
                                log.info("ID %s replaced with %s.", prev_id,
                                         note_id)
                            else:
                                log.warn("Failed to assign new note ID!")
                        else:
                            # We want to update the YAML file with a new note ID, but
                            # modifying a YAML file is complicated. One way to do it is via a
                            # lower-level interface. Here we use the nodes representation,
                            # which we modify directly. The nodes representation can then be
                            # converted into a YAML file that fairly faithfully resembles the
                            # original, but with the newly added note ID.
                            note_node.value.insert(0, (
                                yaml.ScalarNode(tag='tag:yaml.org,2002:str',
                                                value='id'),
                                yaml.ScalarNode(tag='tag:yaml.org,2002:int',
                                                value=str(note_id)),
                            ))
                            new_notes_were_created = True
                # NOTE(review): this update step appears to run for newly
                # created notes as well (no `else` guards it) -- confirm
                # intended; creation above used temporary fields.
                log.debug("Updating existing note...")
                # Assume provided ID is valid for existing note to be updated.
                # Convert each field from Markdown (if `use_md` is True).
                note_uid = uuid.uuid1()
                converted_fields = {
                    k: self.format_text(str(v), use_md, md_sty, md_lineno,
                                        md_tablen, md_mathext,
                                        note_id="%s-%s-%s" %
                                        (note_id, note_uid, field_no))
                    for (field_no, (k, v)) in enumerate(fields.items())
                }
                # Update converted note fields...
                result = self.anki.updateNoteFields(note_id, converted_fields)
                if result.get("error", None):
                    log.warning("Can't update note: %s", description)
                    continue
                # Update note tags...
                ## First get existing note tags.
                result = self.anki.notesInfo([note_id])
                if result.get("error", None):
                    log.warning("Can't get tags for note: %s", description)
                    continue
                current_tags = sorted(result['result'][0]['tags'])
                if current_tags != tags:
                    ## Remove existing note tags.
                    result = self.anki.removeTags([note_id],
                                                  " ".join(current_tags))
                    if result.get("error", None):
                        log.warning("Can't remove tags for note: %s",
                                    description)
                    ## Add new note tags.
                    result = self.anki.addTags([note_id], " ".join(tags))
                    if result.get("error", None):
                        log.warning("Can't add tags for note: %s", description)
        if new_notes_were_created:
            # If any new notes were created, their IDs must be added to YAML file.
            with open(filename, mode='w') as yaml_output_file:
                log.info("\nUpdating file '{}' with new note IDs...".format(
                    filename))
                yaml_output_file.write(yaml.serialize(nodes))
def load_and_send_flashcards(filename):
    """Send the flashcards in ``filename`` to Anki via AnkiConnect.

    Creates notes that have no (valid) ID, updates the fields and tags of
    existing ones, and rewrites the YAML file when new note IDs were
    assigned.
    """
    with open(filename) as yaml_input_file:
        log.info("\nSending file '{}' to Anki...\n".format(filename))
        # Compose to a node tree (not plain data) so newly assigned note
        # IDs can be inserted and the file re-serialized faithfully below.
        nodes = yaml.compose(yaml_input_file)
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # in PyYAML >= 5.1 -- confirm the intended Loader.
        data = yaml.load(yaml.serialize(nodes))
        defaults = data.get('defaults', None)
        log.debug("defaults: {}".format(defaults))
        def_tags = defaults.get("extraTags", list())
        def_deckName = defaults.get("deckName", "Default")
        def_modelName = defaults.get("modelName", "BasicMathJax")
        def_fields = defaults.get("fields", dict())
        def_useMarkdown = defaults.get("useMarkdown", True)
        def_markdownStyle = defaults.get("markdownStyle", "tango")
        def_markdownLineNums = defaults.get("markdownLineNums", False)
        def_markdownTabLength = defaults.get("markdownTabLength", 4)
        # Extract notes_node
        top_map = {k.value: v.value for k, v in nodes.value}
        note_nodes = top_map['notes']
        connection = AnkiConnectClient()
        # For each note_node in notes_node:
        new_notes_were_created = False
        for note_node in note_nodes:
            # Convert to note_dict
            note = yaml.load(yaml.serialize(note_node))
            tags = note.get('extraTags', def_tags).copy()
            tags.extend(note.get('tags', list()))
            tags = sorted(tags)
            deckName = note.get('deckName', def_deckName)
            modelName = note.get('modelName', def_modelName)
            # Set note's fields to defaults, if not already set.
            fields = dict(def_fields)
            fields.update(note.get("fields", dict()))
            # Convert each field from Markdown (if `useMarkdown` is True).
            fields = {
                k: format_text(
                    str(v),
                    note.get('useMarkdown', def_useMarkdown),
                    note.get('markdownStyle', def_markdownStyle),
                    note.get('markdownLineNums', def_markdownLineNums),
                    note.get('markdownTabLength', def_markdownTabLength),
                )
                for (k, v) in fields.items()
            }
            should_create_new_note = True
            must_replace_existing_note_id = False
            if 'id' in note:
                # Check for note with given ID.
                log.debug("Checking for existing note...")
                note_id = note['id']
                # Get info for existing note.
                response, result = connection.send_as_json(
                    action="notesInfo", params=dict(notes=[note_id]))
                if result.get("error", None) or not result['result'][0]:
                    report_anki_error(result,
                                      "\nCan't find note with ID: %s.",
                                      note_id)
                    log.info("The ID will be ignored, and a new note created.")
                    must_replace_existing_note_id = True
                else:
                    should_create_new_note = False
            if should_create_new_note:
                # No provided ID; assume new note should be created.
                log.debug("Creating new note...")
                # Create, obtaining returned ID
                response, result = connection.send_as_json(
                    action="addNote",
                    params=dict(note=dict(
                        deckName=deckName,
                        modelName=modelName,
                        fields=fields,
                        tags=tags,
                    )))
                if result.get("error", None):
                    report_anki_error(result, "Can't create note: %s", note)
                else:
                    # Add ID to note_node
                    note_id = result['result']
                    if must_replace_existing_note_id:
                        prev_id = None
                        for k, v in note_node.value:
                            if k.value == 'id':
                                prev_id, v.value = v.value, str(note_id)
                        if prev_id:
                            log.info("ID %s replaced with %s.", prev_id,
                                     note_id)
                        else:
                            log.warn("Failed to assign new note ID!")
                    else:
                        # Insert the new ID into the node tree so the file
                        # rewrite at the end picks it up.
                        note_node.value.insert(0, (
                            yaml.ScalarNode(tag='tag:yaml.org,2002:str',
                                            value='id'),
                            yaml.ScalarNode(tag='tag:yaml.org,2002:int',
                                            value=str(note_id)),
                        ))
                        new_notes_were_created = True
            else:
                # Assume provided ID is valid for existing note to be updated.
                log.debug("Updating existing note...")
                # Update note fields...
                params = dict(note=dict(id=note_id, fields=fields))
                log.debug("params: {}".format(params))
                response, result = connection.send_as_json(
                    action="updateNoteFields",
                    params=params,
                )
                if result.get("error", None):
                    report_anki_error(result, "Can't update note: %s", note)
                    continue
                # Update note tags...
                ## First get existing note tags.
                response, result = connection.send_as_json(
                    action="notesInfo", params=dict(notes=[note_id]))
                if result.get("error", None):
                    report_anki_error(result, "Can't get tags for note: %s",
                                      note)
                    continue
                current_tags = sorted(result['result'][0]['tags'])
                # log.debug("current tags: %s", current_tags)
                # log.debug("new tags: %s", tags)
                # log.debug("equal?: %s", current_tags == tags)
                if current_tags != tags:
                    # log.debug("updating tags.")
                    ## Remove existing note tags.
                    response, result = connection.send_as_json(
                        action="removeTags",
                        params=dict(notes=[note_id],
                                    tags=" ".join(current_tags)))
                    if result.get("error", None):
                        report_anki_error(result,
                                          "Can't remove tags for note: %s",
                                          note)
                    ## Add new note tags.
                    response, result = connection.send_as_json(
                        action="addTags",
                        params=dict(notes=[note_id], tags=" ".join(tags)))
                    if result.get("error", None):
                        report_anki_error(result,
                                          "Can't add tags for note: %s",
                                          note)
        if new_notes_were_created:
            # If any new notes were created, their IDs must be added to YAML file.
            with open(filename, mode='w') as yaml_output_file:
                log.info(
                    "\nUpdating file '{}' with new note IDs...".format(filename))
                yaml_output_file.write(yaml.serialize(nodes))
def save_merged_documents_as_yaml(merged_document, output_dir, file_name):
    """Serialize ``merged_document`` and write it to
    ``<output_dir>/<file_name>.yaml``."""
    destination = os.path.join(output_dir, file_name + ".yaml")
    with open(destination, 'w+') as handle:
        handle.write(yaml.serialize(merged_document))
def write_project(filename, proj):
    """Serialize ``proj``'s node dump to ``filename`` as UTF-8 YAML.

    yaml.serialize writes directly to the stream when one is given.
    """
    # Fixed: manage the file with a context manager so it is closed even
    # if serialization raises; dead commented-out code removed.
    with open(filename, "w") as fp:
        yaml.serialize(yamlnodedump_project(proj), stream=fp,
                       encoding='utf-8', allow_unicode=True)
def transform_vals_to_string_constructor(loader, node):
    """PyYAML constructor mapping each key to the serialized YAML text of
    its value node."""
    return {key.value: yaml.serialize(value) for key, value in node.value}
def to_yaml(self, stream=None):
    """Serialize this object's YAML node representation.

    Follows ``yaml.serialize`` semantics: with ``stream=None`` the YAML
    text is returned, otherwise it is written to ``stream``.
    """
    node = self._as_yaml_node()
    return yaml.serialize(node, stream)
def constructor_example_hard(loader, node):
    """Debug constructor: print the raw node, construct its scalar, and
    print that too. Returns None."""
    print("hard")
    print(node)
    scalar = loader.construct_scalar(node)
    serialized = yaml.serialize(node)
    print(scalar)