def write_content(self, fpath, content=None):
    if isinstance((content or self.content), str):
        super(CachedYamlResource, self).write_content(fpath, content)
    else:
        with open(fpath, 'wb') as f:
            import syck
            syck.dump(content or self.content, f)

def save_to_disk(self, last, info):
    path = self.get_splash_data_path()
    self.crypt_pws(info, 'encode')
    for_yaml = {'users': info, 'last': last}
    try:
        with open(path, 'wb') as f:
            syck.dump(for_yaml, f)
    except Exception:
        traceback.print_exc()
    self.crypt_pws(info, 'decode')
    return True

class TestPickle(unittest.TestCase):

    def testSimple(self):
        self._testPickle(SIMPLE)

    def testNames(self):
        self._testPickle(NAMES)

    def testModules(self):
        self._testPickle(MODULES)

    def testObjects(self):
        self._testPickle(OBJECTS)

    def testNews(self):
        self._testPickle(NEWS)

    def testApplies(self):
        self._testPickle(APPLIES)

    def _testPickle(self, (source, object)):
        for left, right in zip(syck.load(source), object):
            self.assertEqual(left, right)
        for left, right in zip(syck.load(syck.dump(object)), object):
            self.assertEqual(left, right)

def testScalarTypes(self):
    scalars = syck.load(syck.dump(SCALARS))
    for a, b in zip(scalars, SCALARS):
        self.assertEqual(type(a), type(b))
        if type(a) is float:
            self.assertEqual(repr(a), repr(b))
        else:
            self.assertEqual(a, b)

def testBuggyNodesReduce(self):
    object = syck.load(BUGGY_NODES)
    nodes = syck.parse(BUGGY_NODES)
    output = syck.dump(nodes)
    #print output
    nodes2 = syck.load(output)
    output2 = syck.emit(nodes2)
    object2 = syck.load(output2)
    self.assertEqual(object, object2)

def testDumpUnicode(self):
    for string, tag in UNICODE_STRINGS:
        #print string
        document = syck.dump(string)
        #print document
        new_tag = syck.parse(document).tag
        new_string = syck.load(document)
        self.assertEqual(string, new_string)
        self.assertEqual(type(string), type(new_string))
        self.assertEqual(tag, new_tag)

def testExtensions(self):
    source = EXTENSIONS[0]
    object = EXTENSIONS[1]
    object2 = syck.load(source, Loader=ExLoader)
    for left, right in zip(object, object2):
        self.assertEqual(left, right)
    source2 = syck.dump(object2, Dumper=ExDumper)
    object3 = syck.load(source2, Loader=ExLoader)
    for left, right in zip(object, object3):
        self.assertEqual(left, right)

def save_diagnostic(self):
    if not self._diaglock.acquire(False):
        return
    try:
        INFO('Scheduled Diagnostic Started')
        self.diagnostic = Diagnostic()
        self.diagnostic.prepare_data(fast=True)
        data = self._get_trimmed_data()
        INFO('Saving Diagnostic')
        logdir = sys.LOGFILE_NAME.parent
        with open(logdir / 'diag.yaml', 'w') as diagfile:
            syck.dump(data, diagfile)
        INFO('Scheduled Diagnostic Complete')
    finally:
        self._diaglock.release()

def AspellIndexToYaml(root=None, outfile=None):
    import syck
    index = _GetAspellIndex(root)
    mydict = {}

    def RAD_to_dict(rad):
        res = {}
        if rad.name_native is not None:
            res.update(name_native=rad.name_native.encode('utf-8'))
        res.update(name_english=rad.name_english.encode('utf-8'),
                   location=rad.package_path.encode('utf-8'))
        return res

    for d in index:
        mydict[d.id.encode('utf-8')] = RAD_to_dict(d)
    return syck.dump(mydict, outfile)

def get_prefs(self):
    if profile() is None:
        return ''
    items = profile.prefs.items()
    seen_keys = set()
    alone_and_subtree = set()
    for key in sorted(i[0] for i in items):
        prefix = key.rsplit('.', 1)[0]
        if prefix in seen_keys:
            alone_and_subtree.add(prefix)
        seen_keys.add(key)
    items = [item if item[0] not in alone_and_subtree
             else (item[0] + '_alone_', item[1])
             for item in items]
    try:
        return syck.dump(inflate(items))
    except Exception:
        return format_exc()

def testFileOutput(self):
    output = StringIO.StringIO()
    syck.dump(EXAMPLE, output)
    output.seek(0)
    object = syck.load(output)
    self.assertEqual(object, EXAMPLE)

def save(self):
    logger.debug("saving pkginfo ...")
    fn = os.path.join(self.dirname, PKGDATA)
    f = open(fn, "w")
    yaml.dump(self, f, style="plain")  # possible styles: 1quote, 2quote, fold, literal, plain
    f.close()

def main(self):
    if self.headerfile:
        f = open(self.headerfile)
        header = ''.join(f.readlines())
        f.close()
    else:
        header = default_header
    for filename in self.files:
        print '\n' + '-' * 20
        print 'Opening File', filename
        f = file(filename, "r")
        lexer = MatlabLexer.Lexer(f)  ### create a lexer for calculator
        print 'Starting Parser'
        p = MatParse(lexer)
        p.script()
        print 'Parser Complete'
        a = p.getAST()
        root = ASTtoTree(a)
        rules = BaseRules.BaseRules()
        for node in root:
            node.assert_all()
        #clips.PrintAgenda()
        #clips.SaveFacts(filename.replace('.m', '.facts'))
        rules.run()
        #clips.SaveFacts(filename.replace('.m', '.facts2'))
        if self.astdump:
            outfile = filename.replace('.m', '.ast')
            print 'writing to file', outfile
            f = open(outfile, 'w')
            astlist = [node.to_list() for node in root]
            if has_syck and False:
                f.write(syck.dump(astlist, Dumper=DumpNoTuple))
            else:
                f.write(pprint.pformat(astlist))
            f.close()
        if 0:
            outfile = filename.replace('.m', '.nast')
            print 'writing to file', outfile
            f = open(outfile, 'w')
            if has_syck and False:
                f.write(syck.dump(root, Dumper=DumpNoTuple))
            else:
                f.write(pprint.pformat(root))
        walk = Mat2PyTrans()
        print "Starting Translator"
        s = walk.translate(root)
        # Simple conversions
        s = re.sub(r'pi\(\)', 'pi', s)
        s = re.sub(r'Inf\(\)', 'inf', s)
        s = re.sub(r'nan\(\)', 'nan', s)
        s = re.sub(r'int\(([\d]+)\.\)', r'\1', s)

        def sub_eval(m):
            return str(eval(m.group(1)))

        s = re.sub(r"np.random.rand\('state',", "random.set_state(", s)
        #s = re.sub(r"random.rand\(\),", "random.set_state(", s)
        s = re.sub(r'([\d]+\.?-1)', sub_eval, s)
        #s = re.sub(r'matdiv\((.+?),\ (\d+.)\)', r'(\1)/\2', s)
        #s = re.sub(r'matdiv\((\d+.),\ ', r'\1/(', s)
        #s = re.sub(r'np.dot\((.+?),\ (\d+.)\)', r'(\1)*\2', s)
        #s = re.sub(r'np.dot\((\d+.),\ ', r'\1*(', s)
        s = re.sub(r'shape\.Error\(([\w\.]+),\ ([\w\.]+)\)', r'\1.shape[\2-1]', s)
        s = re.sub(r'shape\.Error\((\w+)\)', r'\1.shape', s)
        s = re.sub(r'\.flatten\(1\)\.conj\(\)\.T', r'.flatten(0).conj()', s)
        s = re.sub(r'\.flatten\(1\)\.T', r'.flatten(0)', s)
        s = re.sub(r'\.flatten\(1\)\.T', r'.flatten(0)', s)
        s = re.sub(r'xend', r'0', s)
        print 'Translation Complete'
        #print p.var_names()
        outfile = filename.replace('.m', '.py')
        print 'writing to file', outfile
        f = open(outfile, 'w')
        f.write(header)
        f.write(s)
        f.close()

def roundTrip(self, obj, yaml):
    self.assertEqual(obj, syck.load(syck.dump(yaml)))
    self.assertEqual(obj, syck.load(yaml))

def run(args):
    src = path(args.src)
    revs = enumerate_revisions(src)
    dist = path(args.dist)
    feature_pth = dist / args.feature
    from StringIO import StringIO
    from collections import defaultdict
    from util.primitives.structures import oset
    versions = oset()
    groups = defaultdict(list)
    for domain, locale, pofile, catalog, template_version in versioned_pos('.'):
        versions.add(template_version)
        groups[template_version].append((domain, locale, pofile, catalog))
    for template_version in versions:
        plugins = {}
        template_root = feature_pth / template_version
        for domain, locale, pofile, catalog in groups[template_version]:
            revid, revno = revs[src.relpathto(pofile).expand()]
            out_zip = template_root / locale / '-'.join([domain, template_version, locale, str(revno)]) + '.zip'
            if not out_zip.parent.isdir():
                out_zip.parent.makedirs()
            mobuf = StringIO()
            write_mo(mobuf, catalog)
            zbuf = StringIO()
            z = zipfile.ZipFile(zbuf, 'w', zipfile.ZIP_DEFLATED)
            z.writestr('-'.join([domain, locale]) + '.mo', mobuf.getvalue())
            infoyaml = info_yaml(args.feature, domain, locale)
            try:
                infoyaml['name'] = u'%s (%s)' % (babel.Locale(locale).get_display_name('en'),
                                                 babel.Locale(locale).get_display_name(locale))
            except Exception:
                pass
            infoyaml['pot_version'] = template_version
            infoyaml['bzr_revno'] = revno
            infoyaml['bzr_revid'] = revid
            infoyaml['catalog_format'] = 'mo'
            infoyaml_bin = syck.dump(infoyaml)
            z.writestr(INFOYAML, infoyaml_bin)
            z.close()
            zout = zbuf.getvalue()
            with out_zip.open('wb') as out:
                out.write(zout)
            infoyaml_pth = (out_zip.parent / INFOYAML)
            with infoyaml_pth.open('wb') as infoyaml_out:
                infoyaml_out.write(infoyaml_bin)
            plugins[infoyaml['shortname']] = dict(
                meta=httprelpath(template_root.relpathto(infoyaml_pth)),
                dist_types=ZIP_DIST,
                zip=dict(
                    location=httprelpath(template_root.relpathto(out_zip))
                )
            )
        idxyaml = template_root / 'index.yaml'
        idxbin = syck.dump(dict(plugins=plugins))
        with idxyaml.open('wb') as idx_out:
            idx_out.write(idxbin)
    update_pth = feature_pth / 'update.yaml'
    with open(update_pth, 'wb') as update_out:
        update_out.write(syck.dump({'all': {'release': httprelpath(feature_pth.relpathto(idxyaml))}}))
    try:
        site_d = syck.load(wget('http://s3.amazonaws.com/update.digsby.com/' + dist.name + '/site.yaml'))
    except Exception:
        traceback.print_exc()
        site_d = {}
    try:
        featurs = site_d['features']
    except KeyError:
        featurs = site_d['features'] = {}
    featurs[args.feature] = {
        'name': args.name,
        'url': httprelpath(dist.relpathto(update_pth)),
    }
    with open(dist / 'site.yaml', 'wb') as site_out:
        site_out.write(syck.dump(site_d))

rid = sha1(str(time.time())).hexdigest()

# Put together message.
r = {}
r[':msgtime'] = int(time.time())
r[':filter'] = {
    'identity': [],
    'fact': [],
    'agent': [],
    'cf_class': [],
}
r[":requestid"] = rid
r[":callerid"] = 'cert=%s' % CERTNAME
r[":senderid"] = 'pythontest'
r[":msgtarget"] = target
r[':body'] = yaml.dump('ping')
h = rr.sign(sha1(r[':body']).digest(), 'sha1')
r[':hash'] = h.encode('base64').replace("\n", "").strip()
data = yaml.dump(r)

s.put(data, target)
time.sleep(2)

results = []
while True:
    x = None
    try:
        x = s.get_nowait()
        print x
    except:
        break
    if not x:

def testStringOutput(self):
    source = syck.dump(EXAMPLE)
    object = syck.load(source)
    self.assertEqual(object, EXAMPLE)

def testCollectionTypes(self):
    collections = syck.load(syck.dump(COLLECTIONS))
    for a, b in zip(collections, COLLECTIONS):
        self.assertEqual(type(a), type(b))
        self.assertEqual(a, b)

def _testAlias(self, objects):
    objects = syck.load(syck.dump(objects))
    for object in objects:
        self.assert_(object is objects[0])

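# A small, hedged sketch of the aliasing behaviour that _testAlias above checks:
# when the same Python object appears several times in a dumped document, syck
# writes an anchor plus aliases, so loading gives back one shared object (this is
# exactly what the helper asserts). The sample list here is illustrative, not the
# original test fixture.
import syck

shared = {'key': 'value'}
loaded = syck.load(syck.dump([shared, shared, shared]))
assert all(item is loaded[0] for item in loaded)
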
def testOddAliases(self):
    document = syck.parse(syck.dump(ODD_ALIASES))
    for group in document.value:
        for item in group.value[1:]:
            self.assert_(item is not group.value[0])

def serialize(self, f):
    syck.dump(self.data, f)

def testNonsenseOutput(self):
    self.assertRaises(AttributeError, lambda: syck.dump(EXAMPLE, 'output'))

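# A minimal, self-contained sketch of the syck round trip exercised by the tests
# above: dump() with a single argument returns a YAML string, dump() with a
# file-like second argument writes to it, and load() accepts either form back
# (passing a plain string as the destination fails, as testNonsenseOutput shows).
# Assumes the PySyck bindings ("import syck") are installed; the sample data is
# illustrative only.
import StringIO
import syck

example = {'name': 'example', 'ids': [1, 2, 3]}

text = syck.dump(example)              # dump to a YAML string
assert syck.load(text) == example      # load it back from the string

buf = StringIO.StringIO()
syck.dump(example, buf)                # dump to a file-like object
buf.seek(0)
assert syck.load(buf) == example       # load it back from the file-like object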