def verify(self):
    """Return True if the stream behind self.fp contains valid JSON, else False.

    The stream is rewound first, so the check can run at any time. The local
    name isB64 reflects the historical use of this check for b64 containers.
    """
    self.fp.seek(0)
    try:
        # .read() itself can raise UnicodeDecodeError on binary content of a
        # text-mode stream, so it must sit inside the try block as well.
        cont = self.fp.read()
        json.loads(cont)
        isB64 = True
    except (UnicodeDecodeError, ValueError):
        # json.loads reports malformed input via ValueError (JSONDecodeError
        # is a subclass); undecodable bytes are treated the same way.
        isB64 = False
    return isB64
def verify(self):
    """Check whether the underlying stream parses as JSON.

    Rewinds the stream before reading; any decode or parse failure is
    interpreted as "not JSON".
    """
    self.fp.seek(0)
    try:
        json.loads(self.fp.read())
    except (UnicodeDecodeError, ValueError):
        isB64 = False
    else:
        isB64 = True
    return isB64
def verify(self):
    """Return True if the stream behind self.fp parses as JSON, else False.

    Rewinds the stream before reading so repeated calls are safe.
    """
    self.fp.seek(0)
    try:
        cont = self.fp.read()
        json.loads(cont)
        isB64 = True
    except (UnicodeDecodeError, ValueError):
        # json.loads raises ValueError on parse errors (JSONDecodeError is a
        # subclass). The stdlib json module has no "DecodeError" attribute, so
        # the previous except clause would itself fail with AttributeError the
        # moment a parse error actually occurred.
        isB64 = False
    return isB64
def callback_ctx(self, amqp_message):
    """Dispatch one AMQP message to the registered callback, then ack it.

    The message body is double-encoded JSON (a JSON string whose content is
    itself a JSON object), hence the nested json.loads calls. The resulting
    dict is passed to self.callback_func as keyword arguments. The ack is
    attempted regardless of whether the callback succeeded.
    """
    # TODO handle possible json.loads errors
    ctx = json.loads(json.loads(amqp_message.body))
    try:
        self.callback_func(**ctx)
    except BaseException as e:
        # NOTE(review): BaseException also catches KeyboardInterrupt/SystemExit
        # here -- confirm that is intended for this consumer loop.
        log_exception('AmqpClient callback exception:\n')
    # acknowledge delivery; failures are logged but not re-raised
    try:
        self.chan.basic_ack(amqp_message.delivery_tag)
    except BaseException as e:
        logging.error('AmqpClient could not send ACK to queue: %s' % e)
def manifest_from_url(self, url):
    """Download the contrib catalog manifest at *url* and return it as an ExtMap."""
    urlobj = urllib.urlopen(url)  # urllib does handle https
    assert urlobj.getcode() == 200, "Could not access the contrib catalog URL: %s" % url
    return ExtMap(json.loads(urlobj.read()))
def toResinfo(self):
    """Extend the parent's resource-info list with the decoded b64 payload."""
    result = super(self.__class__, self).toResinfo()
    if self.format == "b64" and self.path:
        payload = filetool.read(self.path)
        result.append(json.loads(payload))
    return result
def reduceLoop(startNode):
    """Try to constant-fold the condition of an enclosing "if" node.

    Walks up from *startNode* (which must be a constant) to find a loop node
    whose first child is on the path; if that loop is an IF and the constant
    is its (sole) condition, the statement is inlined to the taken branch via
    treeutil.inlineIfStatement. Returns True when the tree was modified.
    """
    treeModified = False
    conditionNode = None
    # Can only reduce constant condition expression
    if startNode.type != "constant":
        return treeModified
    # Can only reduce a condition expression
    # of a loop context
    node = startNode
    while(node):
        # find the loop's condition node
        if node.parent and node.parent.type == "loop" and node.parent.getFirstChild(ignoreComments=True)==node:
            conditionNode = node
            break
        node = node.parent
    if not conditionNode:
        return treeModified
    # handle "if" statements
    if conditionNode.parent.get("loopType") == "IF":
        loopNode = conditionNode.parent
        # startNode must be only condition
        if startNode==conditionNode or isDirectDescendant(startNode, conditionNode):
            value = startNode.get("value")
            if startNode.get("constantType") == 'string':
                # quote so the JSON parser sees a string literal
                value = '"' + value + '"'
            # re-parse into an internal value
            value = json.loads(value)
            condValue = bool(value)
            #print "optimizing: if"
            treeutil.inlineIfStatement(loopNode, condValue)
            treeModified = True
    return treeModified
def __init__(self, path):
    """Load and parse the JSON manifest file at *path*.

    Any failure is re-raised with the offending path prepended to the
    exception message for easier diagnosis.

    NOTE(review): the parsed 'manifest' is not stored on self in this span --
    confirm nothing is missing from this view.
    """
    self.path = path
    try:
        mf = codecs.open(path, "r", "utf-8")
        try:
            manifest = json.loads(mf.read())
        finally:
            # close even when parsing fails; the handle previously leaked
            mf.close()
    except Exception as e:
        # prepend context so the failing file is obvious in the traceback
        msg = "Reading of manifest file failed: '%s'" % path + ("\n%s" % e.args[0] if e.args else "")
        e.args = (msg,) + e.args[1:]
        raise
def __init__(self, path):
    """Read the manifest file at *path* and parse it as JSON.

    Failures are re-raised with the path added to the message so the caller
    can tell which file was at fault.
    """
    self.path = path
    try:
        mf = codecs.open(path, "r", "utf-8")
        try:
            manifest = json.loads(mf.read())
        finally:
            # guarantee the handle is closed even if json.loads raises
            # (it previously leaked on parse errors)
            mf.close()
    except Exception as e:
        msg = "Reading of manifest file failed: '%s'" % path + (
            "\n%s" % e.args[0] if e.args else "")
        e.args = (msg,) + e.args[1:]
        raise
def parseMetaFile(self, path):
    """Load the font .meta file that accompanies *path*.

    Any read/parse error is re-raised with the .meta file name prepended to
    the exception message.

    NOTE(review): fontDict is neither stored nor returned in this span --
    confirm the remainder of the method isn't missing from this view.
    """
    # Read the .meta file
    # it doesn't seem worth to apply caching here
    meta_fname = os.path.splitext(path)[0] + '.meta'
    try:
        meta_content = filetool.read(meta_fname)
        fontDict = json.loads(meta_content)
    except Exception, e:
        # prepend context so the failing file is obvious in the traceback
        msg = "Reading of .meta file failed: '%s'" % meta_fname + (
            "\n%s" % e.args[0] if e.args else "")
        e.args = (msg, ) + e.args[1:]
        raise
def getSchema(self):
    """Load the generator config JSON schema shipped relative to this module.

    Raises with a path-augmented message if the schema file cannot be read
    or parsed.

    NOTE(review): 'schema' is not returned in this span -- confirm nothing
    is missing from this view.
    """
    relPathToSchema = "/../../../data/config/config_schema.json"
    schemaPath = os.path.abspath(os.path.dirname(__file__) + relPathToSchema)
    try:
        # renamed from 'file' (shadowed the builtin); close the handle even
        # when json.loads raises -- it previously leaked on parse errors
        schemaFile = codecs.open(schemaPath, "r", "utf-8")
        try:
            schema = json.loads(schemaFile.read())
        finally:
            schemaFile.close()
    except Exception as e:
        msg = "Reading of schema file failed: '%s'" % schemaPath + (
            "\n%s" % e.args[0] if e.args else "")
        e.args = (msg,) + e.args[1:]
        raise
def __init__(self, path):
    """Parse the library manifest at *path* and cache its key sections.

    Exposes the 'provides' entries (class path, translation, namespace,
    encoding, resource path, type) as instance attributes.
    """
    mf = codecs.open(path, "r", "utf-8")
    try:
        manifest = json.loads(mf.read())
    finally:
        # close even if json.loads raises; the handle previously leaked
        mf.close()
    self._manifest = manifest
    # NOTE(review): libinfo is assigned but unused in this span -- confirm
    # it isn't needed further down in the original file.
    libinfo = self._manifest['info']
    libprovides = self._manifest['provides']
    self.classpath = libprovides['class']
    self.translation = libprovides['translation']
    self.namespace = libprovides['namespace']
    self.encoding = libprovides['encoding']
    self.resource = libprovides['resource']
    self.type = libprovides['type']
def toResinfo(self):
    """Return the parent's resource-info list, extended with the decoded
    b64 image data when this resource is a b64 container.

    Read/parse failures are re-raised with the image path prepended.
    """
    result = super(self.__class__, self).toResinfo()
    if self.format == "b64" and self.path:
        try:
            cont = filetool.read(self.path)
            cont = json.loads(cont)
        except Exception as e:
            msg = "Reading of b64 image file failed: '%s'" % self.path + (
                "\n%s" % e.args[0] if e.args else "")
            e.args = (msg, ) + e.args[1:]
            raise
        else:
            result.append(cont)
    # was missing: the sibling implementation (without error wrapping)
    # returns the list, so callers expect it back
    return result
def parseMetaFile(self, path):
    """Load the font .meta file belonging to *path*.

    Any read/parse error is re-raised with the .meta file name prepended
    to the exception message.

    NOTE(review): fontDict is neither stored nor returned in this span --
    confirm the remainder of the method isn't missing from this view.
    """
    # Read the .meta file
    # it doesn't seem worth to apply caching here
    meta_fname = os.path.splitext(path)[0]+'.meta'
    try:
        meta_content = filetool.read(meta_fname)
        fontDict = json.loads(meta_content)
    except Exception, e:
        # add the file name to the message for easier diagnosis
        msg = "Reading of .meta file failed: '%s'" % meta_fname + (
            "\n%s" % e.args[0] if e.args else "" )
        e.args = (msg,) + e.args[1:]
        raise
def toResinfo(self):
    """Extend and return the parent's resource-info list with the decoded
    b64 payload of this image, when applicable.

    Read/parse failures are re-raised with the image path in the message.
    """
    result = super(self.__class__, self).toResinfo()
    if self.format == "b64" and self.path:
        try:
            cont = filetool.read(self.path)
            cont = json.loads(cont)
        except Exception as e:
            msg = "Reading of b64 image file failed: '%s'" % self.path + (
                "\n%s" % e.args[0] if e.args else "" )
            e.args = (msg,) + e.args[1:]
            raise
        else:
            result.append(cont)
    # was missing: callers of the sibling (unwrapped) variant get the list back
    return result
def parseMetaFile(self, path):
    """Parse the .meta file accompanying *path* and register every embedded
    image it describes on self.embeds.

    Caching is deliberately not applied; the files are small.
    """
    meta_path = os.path.splitext(path)[0] + '.meta'
    meta_map = json.loads(filetool.read(meta_path))
    # each entry maps an image id to [width, height, type, combinedUri, off-x, off-y]
    for img_id, img_spec in meta_map.items():
        img = Image()
        img.id = img_id
        self.embeds.append(img.fromMeta(img_spec))
    return
def parseMetaFile(self, path):
    """Read the sibling .meta file of *path* and append an Image object to
    self.embeds for every entry it contains.

    No caching here -- parsing these small files is cheap.
    """
    # derive the .meta name from the resource path
    base, _ = os.path.splitext(path)
    contents = filetool.read(base + '.meta')
    # entry layout: imageId : [width, height, type, combinedUri, off-x, off-y]
    for image_id, image_spec in json.loads(contents).items():
        image_object = Image()
        image_object.id = image_id
        image_object = image_object.fromMeta(image_spec)
        self.embeds.append(image_object)
    return
def parseMetaFile(self, path):
    """Read the sibling .meta file of *path* and record every embedded image
    in self.embeds, keyed by image id.

    No caching: the .meta files are small and cheap to parse.
    """
    meta_path = os.path.splitext(path)[0] + '.meta'
    meta_map = json.loads(filetool.read(meta_path))
    for image_id, image_spec in meta_map.items():
        self._console.debug("found embedded image: %r" % image_id)
        # entry layout: imagePath : [width, height, type, combinedUri, off-x, off-y];
        # ImgInfoFmt abstracts from that raw representation
        self.embeds[image_id] = ImgInfoFmt(image_spec)
    return
def getQxPath(): path = QOOXDOO_PATH # OS env takes precedence if os.environ.has_key("QOOXDOO_PATH"): path = os.environ["QOOXDOO_PATH"] # else use QOOXDOO_PATH from config.json else: config_file = ShellOptions.config if os.path.exists(config_file): # try json parsing with qx json if not path.startswith('${'): # template macro has been resolved sys.path.insert(0, os.path.join(path, QX_PYLIB)) try: from misc import json got_json = True except: got_json = False got_path = False if got_json: config_str = codecs.open(config_file, "r", "utf-8").read() #config_str = stripComments(config_str) # not necessary under demjson config = json.loads(config_str) p = config.get("let") if p: p = p.get("QOOXDOO_PATH") if p: path = p got_path = True # regex parsing - error prone if not got_path: qpathr = re.compile(r'"QOOXDOO_PATH"\s*:\s*"([^"]*)"\s*,?') conffile = codecs.open(config_file, "r", "utf-8") aconffile = conffile.readlines() for line in aconffile: mo = qpathr.search(line) if mo: path = mo.group(1) break # assume first occurrence is ok path = os.path.normpath( os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), path)) return path
def getQxPath(): path = QOOXDOO_PATH # OS env takes precedence if os.environ.has_key("QOOXDOO_PATH"): path = os.environ["QOOXDOO_PATH"] # else use QOOXDOO_PATH from config.json else: config_file = ShellOptions.config if os.path.exists(config_file): # try json parsing with qx json if not path.startswith("${"): # template macro has been resolved sys.path.insert(0, os.path.join(path, QX_PYLIB)) try: from misc import json got_json = True except: got_json = False got_path = False if got_json: config_str = codecs.open(config_file, "r", "utf-8").read() config_str = stripComments(config_str) config = json.loads(config_str) p = config.get("let") if p: p = p.get("QOOXDOO_PATH") if p: path = p got_path = True # regex parsing - error prone if not got_path: qpathr = re.compile(r'"QOOXDOO_PATH"\s*:\s*"([^"]*)"\s*,?') conffile = codecs.open(config_file, "r", "utf-8") aconffile = conffile.readlines() for line in aconffile: mo = qpathr.search(line) if mo: path = mo.group(1) break # assume first occurrence is ok path = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), path)) return path
def parseTestJson(jsonString):
    """Convert a build-report JSON string into a per-app dict with normalized
    start/end timestamps and the build duration in seconds.

    Apps whose 'BuildError' entry is set are skipped entirely.
    Returns: {app: {'stime': str, 'etime': str, 'duration': number}}
    """
    apps = {}
    jData = json.loads(jsonString)
    format_in = "%Y-%m-%d_%H-%M-%S"
    format_out = "%Y-%m-%d_%H:%M:%S"
    for app, appVals in jData.items():
        if appVals['BuildError'] is not None:  # idiomatic None test (was "!= None")
            continue
        apps[app] = {}
        # build start time
        stime = datetime.datetime.strptime(appVals['BuildStarted'], format_in)
        apps[app]['stime'] = stime.strftime(format_out)
        # build end time
        etime = datetime.datetime.strptime(appVals['BuildFinished'], format_in)
        apps[app]['etime'] = etime.strftime(format_out)
        # build duration in secs
        apps[app]['duration'] = timedelta_to_seconds(etime - stime)
    return apps
def take_action(self, action, dest, opt, value, values, parser):
    """Handle the custom "extend" and "map" option actions; defer anything
    else to the stock Option implementation.

    - "extend": comma-separated list; empty items are dropped, the rest is
      appended to the destination list.
    - "map": a "<key>:<val>" pair; a value starting with '[' or '{' is
      decoded as JSON, otherwise kept as the raw string.

    Raises OptionValueError for a malformed "map" value.
    """
    if action == "extend":
        # drop empty items in one pass; the old while/remove loop was O(n^2)
        lvalue = [item for item in value.split(",") if item]
        values.ensure_value(dest, []).extend(lvalue)
    elif action == "map":
        keyval = value.split(":", 1)
        if len(keyval) == 2 and len(keyval[0]) > 0:
            # [:1] instead of [0]: an empty value ("key:") no longer raises
            # IndexError and is stored as the empty string
            if keyval[1][:1] in ("[", "{"):
                # decode a Json value
                val = json.loads(keyval[1])
            else:
                val = keyval[1]
            values.ensure_value(dest, {})[keyval[0]] = val
        else:
            raise OptionValueError("Value has to be of the form '<key>:<val>': %s" % value)
    else:
        Option.take_action(
            self, action, dest, opt, value, values, parser)
def reduceLoop(startNode):
    """Try to constant-fold the condition of an enclosing "if" statement.

    Walks up from *startNode* (which must be a constant) to the nearest
    "expression" node directly under a "loop"; if that loop is an IF and the
    constant is the sole condition, the statement is inlined to the taken
    branch via treeutil.inlineIfStatement. Returns True iff the tree changed.
    """
    treeModified = False
    conditionNode = None
    # NOTE(review): loopType is assigned here but never used in this span
    loopType = None
    # Can only reduce constant condition expression
    if startNode.type != "constant":
        return treeModified
    # Can only reduce a condition expression,
    # i.e. a "loop/expression/..." context
    node = startNode
    while node:
        if node.type == "expression" and node.parent and node.parent.type == "loop":
            conditionNode = node
            break
        node = node.parent
    if not conditionNode:
        return treeModified
    # handle "if" statements
    if conditionNode.parent.get("loopType") == "IF":
        loopNode = conditionNode.parent
        # startNode must be only condition
        if isDirectDescendant(startNode, conditionNode):
            value = startNode.get("value")
            if startNode.get("constantType") == "string":
                # quote so the JSON parser sees a string literal
                value = '"' + value + '"'
            # re-parse into an internal value
            value = json.loads(value)
            condValue = bool(value)
            # print "optimizing: if"
            treeutil.inlineIfStatement(loopNode, condValue)
            treeModified = True
    return treeModified
def manifest_from_url(self, url):
    """Retrieve the catalog manifest from *url*, parse it, and return it
    wrapped in an ExtMap."""
    response = urllib.urlopen(url)  # urllib copes with https as well
    status = response.getcode()
    assert status == 200, "Could not access the contrib catalog URL: %s" % url
    raw = response.read()
    manifest = ExtMap(json.loads(raw))
    return manifest
def CreateDemoJson(dest, qxdir):
    """Generator coroutine that assembles the demobrowser's config JSON.

    Setup phase: writes the demo Manifest/config files derived from the
    tmpl.* templates. Then it repeatedly receives demo html paths via .send()
    and collects per-demo source/build jobs; sending None finalizes and saves
    the aggregated JSON next to the demobrowser.
    """
    source = []
    build = []
    scategories = {}
    bcategories = {}
    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])) , 'default.json')
    assert os.path.isfile(default_json)
    JSON['include'] = [{ "path" : "%s" % default_json }]
    # per-demo template file
    tmpl_json = os.path.join(os.path.dirname(sys.argv[0]) , 'tmpl.json')
    tmpl_manifest = os.path.join(os.path.dirname(sys.argv[0]) , TMPL_MANIFEST)
    tmpl_config = os.path.join(os.path.dirname(sys.argv[0]) , TMPL_CONFIG)
    json_tmpl = open(tmpl_json,"rU").read()
    demo_ns = "%s.demo" % namespace
    # instantiate the Manifest template with the demo namespace
    manifest_tmpl = json.loads(open(tmpl_manifest, 'rU').read())
    manifest_tmpl['provides']['namespace'] = demo_ns
    # instantiate the config template with framework path and application ns
    config_tmpl = json.loads(open(tmpl_config, 'rU').read())
    config_tmpl['let']['QOOXDOO_PATH'] = os.path.join('..', qxdir)
    config_tmpl['jobs']['source-demos']['let']['APPLICATION'] = demo_ns
    fn = os.path.basename(tmpl_manifest)[len('tmpl.'):]  # file name
    open(os.path.join(dest, '..', '..', fn), 'w').write(json.dumps(manifest_tmpl, indent=2, sort_keys=True))
    fn = os.path.basename(tmpl_config)[len('tmpl.'):]
    open(os.path.join(dest, '..', '..', fn), 'w').write(json.dumps(config_tmpl, indent=2, sort_keys=True))
    # jobs section
    JSON['jobs'] = {}
    # allow exported jobs to be shadowed
    JSON['config-warnings'] = {}
    shadowed_jobs = []
    JSON['config-warnings']['job-shadowing'] = shadowed_jobs
    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html == None:  # terminate the generator part and go to finalizing json file
            break
        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)
        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path":"%s" % config_file})
            demo_config = json.loadStripComments(config_file)
            shadowed_jobs.extend(demo_config['export'])
        # build classname
        simple = "%s.%s" % (category,name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple,))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple,))
        # concat all
        currcont = json_tmpl.replace('XXX',"%s.%s"%(category,name)).replace("YYY",name).replace("ZZZ",category)
        templatejobs = json.loads("{" + currcont + "}")
        for job,jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval
    # Post-processing
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])
    JSON['jobs']["source"] = { "run" : sorted(source) }
    JSON['jobs']["build"] = { "run" : sorted(build) }
    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(os.path.join('demobrowser', fJSON), cont)
    yield  # final yield to provide for .send(None) of caller
def CreateDemoJson():
    """Generator coroutine that assembles the demobrowser's config JSON.

    Repeatedly receives demo html paths via .send() and collects per-demo
    source/build jobs from the tool/tmpl.json template; sending None
    finalizes and saves the aggregated JSON to fJSON.
    """
    source = []
    build = []
    scategories = {}
    bcategories = {}
    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = 'tool' + '/' + 'default.json'
    assert os.path.isfile(default_json)
    JSON['include'] = [{ "path" : "%s" % default_json }]
    # per-demo template file
    json_tmpl = open(os.path.join('tool','tmpl.json'),"rU").read()
    # jobs section
    JSON['jobs'] = {}
    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html == None:  # terminate the generator part and go to finalizing json file
            break
        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)
        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path":"%s" % config_file})
        # build classname
        simple = "%s.%s" % (category,name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple,))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple,))
        # concat all: instantiate the per-demo job template
        currcont = json_tmpl.replace('XXX',"%s.%s"%(category,name)).replace("YYY",name).replace("ZZZ",category)
        templatejobs = json.loads("{" + currcont + "}")
        for job,jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval
    # Post-processing: per-category and global aggregate jobs
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])
    JSON['jobs']["source"] = { "run" : sorted(source) }
    JSON['jobs']["build"] = { "run" : sorted(build) }
    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(fJSON, cont)
    yield  # final yield to provide for .send(None) of caller
## # syntax: $0 <file.js> -- parse JS file and write it to console again # # An experimental hybrid deserializer-serializer that uses 'esparse' to parse # the JS, then uses a Moz AST to treegenerator_1 AST transformer, and writes out # the resulting tree. ## import re, os, sys, types, codecs QOOXDOO_PATH = os.path.abspath(os.path.dirname(__file__) + "/../../../..") execfile(QOOXDOO_PATH + "/tool/bin/qxenviron.py") from ecmascript.transform import moztree_to_tree1 from generator.runtime.ShellCmd import ShellCmd from misc import json shell = ShellCmd() cmd = "esparse --raw --loc " + sys.argv[1] #print cmd rcode, stdout, errout = ( shell.execute_piped(cmd)) if rcode != 0: print errout sys.exit(1) tree_json = json.loads(stdout) node = moztree_to_tree1.esprima_to_tree1(tree_json) #print node.toXml() #import pydb; pydb.debugger() def opts():pass opts.breaks = False print node.toJS(opts)
def CreateDemoJson():
    """Generator coroutine that assembles the demobrowser's config JSON.

    Repeatedly receives demo html paths via .send() and collects per-demo
    source/build jobs from the tool/tmpl.json template; sending None
    finalizes and saves the aggregated JSON to fJSON.
    """
    source = []
    build = []
    scategories = {}
    bcategories = {}
    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = 'tool' + '/' + 'default.json'
    assert os.path.isfile(default_json)
    JSON['include'] = [{"path": "%s" % default_json}]
    # per-demo template file
    json_tmpl = open(os.path.join('tool', 'tmpl.json'), "rU").read()
    # jobs section
    JSON['jobs'] = {}
    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html == None:  # terminate the generator part and go to finalizing json file
            break
        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)
        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path": "%s" % config_file})
        # build classname
        simple = "%s.%s" % (category, name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple, ))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple, ))
        # concat all: instantiate the per-demo job template
        currcont = json_tmpl.replace('XXX', "%s.%s" % (category, name)).replace(
            "YYY", name).replace("ZZZ", category)
        templatejobs = json.loads("{" + currcont + "}")
        for job, jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval
    # Post-processing: per-category and global aggregate jobs
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])
    JSON['jobs']["source"] = {"run": sorted(source)}
    JSON['jobs']["build"] = {"run": sorted(build)}
    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(fJSON, cont)
    yield  # final yield to provide for .send(None) of caller