def _handleI18N(script, generator):
    """Write per-language translation and CLDR data files for the provider build.

    For every class that passes the output filter, translation data (and,
    if any such class declares a 'cldr' hint, CLDR localization data) is
    collected and saved under <app-root>/data/translation/ and
    <app-root>/data/locale/ as JSON, one file per language.
    """
    context.console.info("Processing localisation data")
    context.console.indent()
    approot = context.jobconf.get("provider/app-root", "./provider")

    # get class projection: only classes passing the output filter contribute
    class_list = []
    needs_cldr = False
    for classObj in script.classesObj:
        if passesOutputfilter(classObj.id):
            class_list.append(classObj.id)
            # one class requesting CLDR is enough to trigger CLDR generation
            if not needs_cldr and classObj.getHints('cldr'):
                needs_cldr = True

    # get i18n data
    context.console.info("Getting translations")
    trans_dat = generator._locale.getTranslationData(class_list, script.variants, script.locales,
                                                     addUntranslatedEntries=True)
    loc_dat = None
    if needs_cldr:
        context.console.info("Getting CLDR data")
        loc_dat = generator._locale.getLocalizationData(class_list, script.locales)

    # write translation and cldr files, one pair per language
    context.console.info("Writing localisation files: ", False)
    numTrans = len(trans_dat)
    for num, lang in enumerate(trans_dat):
        context.console.progress(num + 1, numTrans)

        # translations: each key maps to a one-element entry list, or [] when untranslated
        transmap = {}
        filename = "i18n-" + lang
        targetname = "i18n-" + lang
        translations = trans_dat[lang]
        for key in translations:
            if translations[key]:
                transmap[key] = [{"target": targetname, "data": {key: translations[key]}}]
            else:
                transmap[key] = []
        filetool.save(approot + "/data/translation/" + filename + ".json", json.dumpsCode(transmap))

        # cldr
        localemap = {}
        filename = "locale-" + lang
        targetname = "locale-" + lang
        if loc_dat:
            # sample: { "cldr" : [ { "target" : "locale-en", "data" : {"alternativeQuotationEnd":'"', "cldr_am": "AM",...}} ]}
            localekeys = loc_dat[lang]
            cldr_entry = [{"target": targetname, "data": {}}]
            # only copy non-empty CLDR values into the entry
            for key in localekeys:
                if localekeys[key]:
                    cldr_entry[0]['data'][key] = localekeys[key]
            localemap['cldr'] = cldr_entry
        # an empty map is still written when there is no CLDR data for this language
        filetool.save(approot + "/data/locale/" + filename + ".json", json.dumpsCode(localemap))

    context.console.outdent()
    return
def loaderPackages(script, compConf):
    """Return a JSON string mapping each package id to its file URI list."""
    pkg_map = dict((pkg.id, {'uris': pkg.files}) for pkg in script.packages)
    return json.dumpsCode(pkg_map)
def _handleResources(script, generator, filtered=True):
    """Collect resource info for the provider build and copy the resource files.

    Writes a resources.json index under <app-root>/data/resource/ and copies
    each (non-embedded) resource file from its library into
    <app-root>/resource/.  With filtered=True only resources whose
    pseudo-classId passes the output filter are considered.
    """

    # Wrap a single resource value in the provider's entry-list format.
    def createResourceInfo(res, resval):
        resinfo = [{"target": "resource", "data": {res: resval}}]
        #filetool.save(approot+"/data/resource/" + res + ".json", json.dumpsCode(resinfo))
        return resinfo

    # Copy one resource file from its library into the provider tree.
    def copyResource(res, library):
        sourcepath = os.path.join(library._resourcePath, res)
        targetpath = approot + "/resource/" + res
        filetool.directory(os.path.dirname(targetpath))
        shutil.copy(sourcepath, targetpath)
        return

    # ----------------------------------------------------------------------
    context.console.info("Processing resources: ", False)
    approot = context.jobconf.get("provider/app-root", "./provider")
    filetool.directory(approot + "/data")
    filetool.directory(approot + "/resource")

    # quick copy of runLogResources, for fast results
    packages = script.packagesSorted()
    parts = script.parts
    variants = script.variants

    allresources = {}
    if filtered:
        # -- the next call is fake, just to populate package.data.resources!
        _ = generator._codeGenerator.generateResourceInfoCode(script, generator._settings,
                                                              context.jobconf.get("library", []))
        for packageId, package in enumerate(packages):
            allresources.update(package.data.resources)
    else:
        # get the main library (the one whose namespace matches the app's)
        mainlib = [x for x in script.libraries if x.namespace == script.namespace][0]
        reslist = mainlib.getResources()
        allresources = ResourceHandler.createResourceStruct(reslist, updateOnlyExistingSprites=False)

    # get resource info
    resinfos = {}
    numResources = len(allresources)
    for num, res in enumerate(allresources):
        context.console.progress(num + 1, numResources)
        # fake a classId-like resourceId ("a.b.c"), for filter matching
        resId = os.path.splitext(res)[0]
        resId = resId.replace("/", ".")
        if filtered and not passesOutputfilter(resId):
            continue
        resinfos[res] = createResourceInfo(res, allresources[res])
        # extract library name space
        if isinstance(allresources[res], types.ListType):
            # it's an image = [14, 14, u'png', u'qx' [, u'qx/decoration/Modern/checkradio-combined.png', 0, 0]]
            library_ns = allresources[res][3]
        else:
            # html page etc. = "qx"
            library_ns = allresources[res]
        if library_ns:  # library_ns == '' means embedded image -> no copying
            library = libraries[library_ns]
            copyResource(res, library)

    filetool.save(approot + "/data/resource/resources.json", json.dumpsCode(resinfos))

    return
def getPackageData(package):
    """Serialize a package's resource/translation/locale info as a JS statement."""
    payload = {
        "resources":    package.data.resources,
        "translations": package.data.translations,
        "locales":      package.data.locales,
    }
    return json.dumpsCode(payload) + ';\n'
def _handleI18N(script, generator): approot = context.jobconf.get("provider/app-root", "./provider") # get class projection class_list = [] needs_cldr = False for classObj in script.classesObj: if passesOutputfilter(classObj.id): class_list.append(classObj.id) if not needs_cldr and classObj.getMeta('cldr'): needs_cldr = True # get i18n data trans_dat = generator._locale.getTranslationData_1(class_list, script.variants, script.locales, addUntranslatedEntries=True) loc_dat = None if needs_cldr: loc_dat = generator._locale.getLocalizationData(class_list, script.locales) # write translation and cldr files for lang in trans_dat: filename = "i18n-" + lang # translations transmap = {} translations = trans_dat[lang] for key in translations: if translations[key]: transmap[key] = [ { "target" : "i18n", "data" : { key : translations[key] }} ] else: transmap[key] = [ ] filetool.save(approot+"/data/translation/"+filename+".json", json.dumpsCode(transmap)) # cldr localemap = {} if loc_dat: localekeys = loc_dat[lang] for key in localekeys: if localekeys[key]: localemap[key] = [ { "target" : "i18n", "data" : { key : localekeys[key] }} ] else: localemap[key] = [ ] filetool.save(approot+"/data/locale/"+filename+".json", json.dumpsCode(localemap)) return
def loaderClosureParts(script, compConf):
    """Return JSON naming the parts whose non-boot packages are all Closure packages."""
    closure_parts = {}
    boot_pkg_id = bootPackageId(script)
    for part in script.parts.values():
        flags = [isClosurePackage(pkg, boot_pkg_id)
                 for pkg in part.packages if pkg.id != boot_pkg_id]
        # the 'boot' package may be the only non-closure package
        if flags and all(flags):
            closure_parts[part.name] = True
    return json.dumpsCode(closure_parts)
def getPackageData(package):
    """Serialize package data (optionally including i18n info) as a JS statement."""
    payload = {"resources": package.data.resources}
    # i18n info is bundled with the package unless it is shipped as dedicated parts
    if not self._job.get("packages/i18n-as-parts", False):
        payload["translations"] = package.data.translations
        payload["locales"] = package.data.locales
    return json.dumpsCode(payload) + ';\n'
def loaderScriptUris_1(script, compConf):
    """Build a JSON list of per-package "<lib>:<file>" script entries.

    NOTE(review): this variant looks unfinished/dead -- `libname` and
    `file_basename` are not defined anywhere in this function (calling it
    would raise NameError), and the inner loop variable `script` shadows
    the function parameter.  Verify whether this function is still
    referenced before relying on it.
    """
    uris = []
    for package in script.packagesSorted():
        package_scripts = []
        uris.append(package_scripts)
        for script in package:  # NOTE(review): shadows the `script` parameter
            script_entry = "%s:%s" % (libname, file_basename)
            package_scripts.append(script_entry)
    return json.dumpsCode(uris)
def CreateDemoData(destdir):
    """Coroutine collecting demo descriptors and writing the demo catalog file.

    The caller .send()s (htmlFilePath, category, demo) triples; a triple with
    htmlFilePath of None terminates collection, after which the accumulated
    catalog (a list of {"classname", "tests"} category records) is written as
    JSON to <destdir>/<demoDataFn>.  Triples are expected grouped by category.
    """
    dist = os.path.join(destdir, demoDataFn)
    res = []
    ocategory = ""
    while True:
        (htmlFilePath, category, demo) = (yield)
        if htmlFilePath is None:  # sentinel: caller is done feeding demos
            break
        # init new category
        if category != ocategory:
            ocategory = category
            resCategory = {}
            res.append(resCategory)
            resCategory["classname"] = category
            resCatDemos = []
            resCategory["tests"] = resCatDemos
        # init new demo
        resDemo = {}
        resCatDemos.append(resDemo)
        # get the tags from the demo's companion .js file
        # (use splitext instead of demo.find("html"): find() matched the FIRST
        # occurrence of "html", producing a wrong .js name for demo basenames
        # that themselves contain "html")
        jsitem = os.path.splitext(demo)[0] + ".js"
        jsfile = os.path.join(demosSourcePath, category, jsitem)
        tags = getTagsFromJsFile(jsfile)
        # split "<basename>_<nr>.html" into title parts; nr defaults to 0
        title = os.path.splitext(demo)[0]
        if "_" in title:
            basename, nr = title.split("_", 1)
        else:
            basename, nr = (title, 0)
        title = title.replace("_", " ")
        resDemo["nr"] = nr
        resDemo["title"] = title
        resDemo["name"] = demo
        resDemo["tags"] = list(tags)

    # Write demodata.js file
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    content = json.dumpsCode(res)
    outputFile = codecs.open(dist, encoding="utf-8", mode="w", errors="replace")
    try:
        outputFile.write(content)
        outputFile.flush()
    finally:
        # close even if the write fails, so the handle is not leaked
        outputFile.close()

    yield  # final yield to catch caller's .send(None)
def partsMap(script):
    """Return JSON mapping each part name to its packages as sort-order indices."""
    pkg_order = script.packagesSortedSimple()
    #print "packages: %r" % pkg_order
    part_data = dict((name, script.parts[name].packagesAsIndices(pkg_order))
                     for name in script.parts)
    return json.dumpsCode(part_data)
def CreateDemoData(destdir):
    """Coroutine collecting demo descriptors and writing the demo catalog file.

    The caller .send()s (htmlFilePath, category, demo) triples; a triple with
    htmlFilePath of None terminates collection, after which the accumulated
    catalog (a list of {"classname", "tests"} category records) is written as
    JSON to <destdir>/<demoDataFn>.  Triples are expected grouped by category.
    """
    dist = os.path.join(destdir, demoDataFn)
    res = []
    ocategory = ""
    while True:
        (htmlFilePath, category, demo) = (yield)
        if htmlFilePath == None:  # sentinel: caller is done feeding demos
            break
        # init new category
        if category != ocategory:
            ocategory = category
            resCategory = {}
            res.append(resCategory)
            resCategory["classname"] = category
            resCatDemos = []
            resCategory["tests"] = resCatDemos
        # init new demo
        resDemo = {}
        resCatDemos.append(resDemo)
        # get the tags from the demo's companion .js file
        # NOTE(review): demo.find("html") matches the FIRST occurrence of
        # "html"; a demo basename containing "html" would yield a wrong .js
        # name -- verify against the actual demo file names.
        jsitem = demo[0:demo.find("html")] + "js"
        jsfile = os.path.join(demosSourcePath, category, jsitem)
        tags = getTagsFromJsFile(jsfile)
        # split "<basename>_<nr>.html" into title parts; nr defaults to 0
        title = os.path.splitext(demo)[0]
        if "_" in title:
            basename, nr = title.split("_", 1)
        else:
            basename, nr = (title, 0)
        title = title.replace("_", " ")
        resDemo["nr"] = nr
        resDemo["title"] = title
        resDemo["name"] = demo
        resDemo["tags"] = list(tags)

    # Write demodata.js file
    if not os.path.exists(destdir):
        os.makedirs(destdir)
    content = json.dumpsCode(res)
    outputFile = codecs.open(dist, encoding="utf-8", mode="w", errors="replace")
    outputFile.write(content)
    outputFile.flush()
    outputFile.close()

    yield  # final yield to catch caller's .send(None)
def loaderPartsMap(script, compConf):
    """Return JSON mapping each part name to the ids of its packages."""
    packages = script.packagesSorted()  # retained from original (result unused here)
    #print "packages: %r" % packages
    part_data = {}
    for name in script.parts:
        #part_data[name] = script.parts[name].packagesAsIndices(packages)
        part_data[name] = [pkg.id for pkg in script.parts[name].packages]
    return json.dumpsCode(part_data)
def _handleResources(script, generator):
    """Collect filtered resource info for the provider build and copy the files.

    Variant that always filters by the output filter and resolves libraries
    as dict-like records ('path'/'resource' keys) rather than objects.
    Writes resources.json under <app-root>/data/resource/ and copies each
    resource file into <app-root>/resource/.
    """

    # Wrap a single resource value in the provider's entry-list format.
    def createResourceInfo(res, resval):
        resinfo = [{"target": "resource", "data": {res: resval}}]
        #filetool.save(approot+"/data/resource/" + res + ".json", json.dumpsCode(resinfo))
        return resinfo

    # Copy one resource file from its library into the provider tree.
    def copyResource(res, library):
        sourcepath = os.path.join(library['path'], library['resource'], res)
        targetpath = approot + "/resource/" + res
        filetool.directory(os.path.dirname(targetpath))
        shutil.copy(sourcepath, targetpath)
        return

    # ----------------------------------------------------------------------
    approot = context.jobconf.get("provider/app-root", "./provider")
    filetool.directory(approot + "/data")
    filetool.directory(approot + "/resource")

    # quick copy of runLogResources, for fast results
    packages = script.packagesSortedSimple()
    parts = script.parts
    variants = script.variants

    allresources = {}
    # get resource info
    # -- the next call is fake, just to populate package.data.resources!
    _ = generator._codeGenerator.generateResourceInfoCode(script, generator._settings,
                                                          context.jobconf.get("library", []))
    for packageId, package in enumerate(packages):
        allresources.update(package.data.resources)

    resinfos = {}
    for res in allresources:
        # fake a classId-like resourceId ("a.b.c"), for filter matching
        resId = os.path.splitext(res)[0]
        resId = resId.replace("/", ".")
        if passesOutputfilter(resId):
            resinfos[res] = createResourceInfo(res, allresources[res])
            # extract library name space
            if isinstance(allresources[res], types.ListType):
                # it's an image = [14, 14, u'png', u'qx' [, u'qx/decoration/Modern/checkradio-combined.png', 0, 0]]
                library_ns = allresources[res][3]
            else:
                # html page etc. = "qx"
                library_ns = allresources[res]
            library = libraries[library_ns]
            copyResource(res, library)

    filetool.save(approot + "/data/resource/resources.json", json.dumpsCode(resinfos))

    return
def runLogResources(jobconf, script):
    """Dump the merged resource info of all packages into a JSON log file."""
    # only act when "log/resources" is configured as a map
    if not isinstance(jobconf.get("log/resources", False), types.DictType):
        return
    console = Context.console
    console.info("Dumping resource info...")
    console.indent()
    # populate package.data.resources, then merge every package's entries
    CodeGenerator.packagesResourceInfo(script)
    merged = {}
    for package in script.packagesSorted():
        merged.update(package.data.resources)
    log_path = jobconf.get("log/resources/file", "resources.json")
    filetool.save(log_path, json.dumpsCode(merged))
    console.outdent()
    return
def runLogResources(jobconf, script):
    """Dump the merged resource info of all packages into a JSON log file."""
    # only act when "log/resources" is configured as a map
    if not isinstance(jobconf.get("log/resources", False), types.DictType):
        return
    console = Context.console
    packages = script.packagesSorted()
    console.info("Dumping resource info...")
    console.indent()
    allresources = {}
    # get resource info
    CodeGenerator.packagesResourceInfo(script)  # populate package.data.resources
    for packageId, package in enumerate(packages):
        allresources.update(package.data.resources)
    file_ = jobconf.get("log/resources/file", "resources.json")
    filetool.save(file_, json.dumpsCode(allresources))
    console.outdent()
    return
def getDataString():
    """Return the current package data rendered as JSON code."""
    return json.dumpsCode(self.packageData())
def generateLoader(script, compConf, globalCodes, bootCode='', ):
    """Fill the loader template with part/package/i18n data for this script.

    NOTE(review): `result` is assigned by loaderFillTemplate but this excerpt
    ends without returning it -- confirm whether a `return result` follows
    elsewhere or callers consume the value differently.
    """
    self._console.info("Generate loader script")
    result = ""
    vals = {}
    # nothing to do for part-less builds
    if not script.parts:
        return result

    # stringify data in globalCodes
    for entry in globalCodes:
        globalCodes[entry] = json.dumpsCode(globalCodes[entry])
        # undo damage done by simplejson to raw strings with escapes \\ -> \
        globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets tripple escaped, therefore the first .replace()

    vals.update(globalCodes)
    vals["Resources"] = json.dumpsCode({})  # just init with empty map
    vals["Translations"] = json.dumpsCode(dict((l, None) for l in script.locales))  # init with configured locales
    vals["Locales"] = json.dumpsCode(dict((l, None) for l in script.locales))

    # Name of the boot part
    vals["Boot"] = loaderBootName(script, compConf)

    # Code (pot.) of the boot part
    vals["BootPart"] = loaderBootPart(script, compConf, bootCode)

    # Translate part information to JavaScript
    vals["Parts"] = loaderPartsMap(script, compConf)

    # Translate URI data to JavaScript
    #vals["Uris"] = loaderScriptUris(script, compConf)

    # Translate URI data to JavaScript
    vals["Packages"] = loaderPackages(script, compConf)

    # Add potential extra scripts
    vals["UrisBefore"] = loaderUrisBefore(script, compConf)

    # Add potential extra css
    vals["CssBefore"] = loaderCssBefore(script, compConf)

    # Whether boot package is inline
    vals["BootIsInline"] = loaderBootInline(script, compConf)

    # Closure package information
    vals["ClosureParts"] = loaderClosureParts(script, compConf)

    # Package Hashes
    #vals["PackageHashes"] = loaderPackageHashes(script, compConf)

    # Script hook for qx.$$loader.decodeUris() function
    vals["DecodeUrisPlug"] = loaderDecodeUrisPlug(script, compConf)

    # Enable "?nocache=...." for script loading?
    vals["NoCacheParam"] = loaderNocacheParam(script, compConf)

    # Locate and load loader template
    template, templatePath = loaderTemplate(script, compConf)

    # Fill template gives result
    try:
        result = loaderFillTemplate(vals, template)
    except KeyError, e:
        # template references a macro we did not provide a value for
        raise ValueError("Unknown macro used in loader template (%s): '%s'" % (templatePath, e.args[0]))
def loaderBootInline(script, compConf):
    """Return a JSON boolean: is the boot package inlined into the loader?"""
    boot_is_inline = inlineBoot(script, compConf)
    return json.dumpsCode(boot_is_inline)
def loaderScriptUris(script, compConf):
    """Return JSON with the script URIs of all packages, in load order."""
    return json.dumpsCode(packageUrisToJS(script.packagesSorted(), script.buildType))
def generateBootCode(parts, packages, boot, script, compConf, variants, settings, bootCode, globalCodes, version="source", decodeUrisFile=None, format=False):
    """Generate the boot/loader script by filling the loader template.

    Collects part/package/URI/hash information from `script` plus the
    stringified `globalCodes` and substitutes them into the loader template
    (custom or default, depending on job config).  `version` is "source" or
    "build" and switches both URI generation and template defaults.
    Returns the filled template text, or "" when there are no parts.
    """

    ##
    # create a map with part names as key and array of package id's and
    # return as string
    def partsMap(script):
        partData = {}
        packages = script.packagesSortedSimple()
        #print "packages: %r" % packages
        for part in script.parts:
            partData[part] = script.parts[part].packagesAsIndices(packages)
            #print "part '%s': %r" % (part, script.parts[part].packages)
        partData = json.dumpsCode(partData)
        return partData

    def fillTemplate(vals, template):
        # Fill the code template with various vals
        templ = MyTemplate(template)
        result = templ.safe_substitute(vals)
        return result

    def packageUrisToJS1(packages, version, namespace=None):
        # Translate URI data to JavaScript
        allUris = []
        for packageId, package in enumerate(packages):
            packageUris = []
            for fileId in package:
                if version == "build":
                    # TODO: gosh, the next is an ugly hack!
                    #namespace = self._resourceHandler._genobj._namespaces[0]  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                    if not namespace:
                        namespace = script.namespace  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                    relpath = OsPath(fileId)
                else:
                    namespace = self._classes[fileId]["namespace"]
                    relpath = OsPath(self._classes[fileId]["relpath"])
                shortUri = Uri(relpath.toUri())
                packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
            allUris.append(packageUris)
        return allUris

    ##
    # Translate URI data to JavaScript
    # using Package objects
    def packageUrisToJS(packages, version):
        allUris = []
        for packageId, package in enumerate(packages):
            packageUris = []
            if package.file:  # build
                namespace = "__out__"
                fileId = package.file
                relpath = OsPath(fileId)
                shortUri = Uri(relpath.toUri())
                packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
            else:  # "source" :
                for clazz in package.classes:
                    namespace = self._classes[clazz]["namespace"]
                    relpath = OsPath(self._classes[clazz]["relpath"])
                    shortUri = Uri(relpath.toUri())
                    packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
            allUris.append(packageUris)
        return allUris

    def loadTemplate(bootCode):
        # try custom loader templates
        loaderFile = compConf.get("paths/loader-template", None)
        if not loaderFile:
            # use default templates
            if version == "build":
                #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-build.tmpl.js")
                # TODO: test-wise using generic template
                loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
            else:
                #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-source.tmpl.js")
                loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
        template = filetool.read(loaderFile)
        return template

    # ---------------------------------------------------------------

    if not parts:
        return ""

    result = ""
    vals = {}
    packages = script.packagesSortedSimple()
    loader_with_boot = self._job.get("packages/loader-with-boot", True)

    # stringify data in globalCodes
    for entry in globalCodes:
        globalCodes[entry] = json.dumpsCode(globalCodes[entry])
        # undo damage done by simplejson to raw strings with escapes \\ -> \
        globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets tripple escaped, therefore the first .replace()

    vals.update(globalCodes)

    if version == "build":
        vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!
    vals["Boot"] = '"%s"' % boot
    if version == "build":
        vals["BootPart"] = bootCode
    else:
        vals["BootPart"] = ""
        # fake package data
        for key, package in enumerate(packages):
            vals["BootPart"] += "qx.$$packageData['%d']={};\n" % key

    # Translate part information to JavaScript
    vals["Parts"] = partsMap(script)

    # Translate URI data to JavaScript
    #vals["Uris"] = packageUrisToJS1(packages, version)
    vals["Uris"] = packageUrisToJS(packages, version)
    vals["Uris"] = json.dumpsCode(vals["Uris"])

    # Add potential extra scripts
    vals["UrisBefore"] = []
    if self._job.get("add-script", False):
        additional_scripts = self._job.get("add-script", [])
        for additional_script in additional_scripts:
            vals["UrisBefore"].append(additional_script["uri"])
    vals["UrisBefore"] = json.dumpsCode(vals["UrisBefore"])

    # Whether boot package is inline
    if version == "source":
        vals["BootIsInline"] = json.dumpsCode(False)
    else:
        vals["BootIsInline"] = json.dumpsCode(loader_with_boot)

    # Closure package information
    cParts = {}
    if version == "build":
        for part in script.parts:
            if not loader_with_boot or part != "boot":
                cParts[part] = True
    vals["ClosureParts"] = json.dumpsCode(cParts)

    # Package Hashes
    vals["PackageHashes"] = {}
    for key, package in enumerate(packages):
        if package.hash:
            vals["PackageHashes"][key] = package.hash
        else:
            vals["PackageHashes"][key] = "%d" % key  # fake code package hashes in source ver.
    vals["PackageHashes"] = json.dumpsCode(vals["PackageHashes"])

    # Script hook for qx.$$loader.decodeUris() function
    vals["DecodeUrisPlug"] = ""
    if decodeUrisFile:
        plugCode = filetool.read(self._config.absPath(decodeUrisFile))  # let it bomb if file can't be read
        vals["DecodeUrisPlug"] = plugCode.strip()

    # Enable "?nocache=...." for script loading?
    vals["NoCacheParam"] = "true" if self._job.get("compile-options/uris/add-nocache-param", True) else "false"

    # Add build details
    vals["Build"] = int(time.time() * 1000)
    vals["Type"] = version

    # Locate and load loader basic script
    template = loadTemplate(bootCode)

    # Fill template gives result
    result = fillTemplate(vals, template)

    return result
def do_GET(self):
    """Serve GET requests, with extra routes supporting active (live) reload.

    Routes: /favicon.ico (muted 404), the AR sentinel URL (JSONP change
    check), the AR script URL (serves active_reload.js), the app URL
    (index.html with the AR script tag injected), and default CGI/file
    serving for everything else.
    """
    # Mute error messages for favicon.ico requests
    if self.path == "/favicon.ico":
        self.send_response(404)
        self.finish()

    # Support for active reload
    # perform a check when the sentinel url is requested
    elif (self.ar_is_active() and self.path.startswith(AR_Check_Url)):
        console = Context.console
        # Get 'since' query parm
        if self.path.find('?') != -1:
            self.path, self.query = self.path.split('?', 1)
        else:
            self.query = ''
        query_map = cgi.parse_qs(self.query)
        assert query_map["since"]
        since = float(query_map["since"][0])
        #ret = 200 if self.check_reload() else 304 # 304=not modified
        # Return Json data (as a JSONP callback invocation)
        resp_data = {"changed": False}
        if self.check_reload(since):
            resp_data["changed"] = True
            console.info("%s - Signalling reload" % (datetime.datetime.now(),))
        resp_string = "qx_AR.script_callback(%s)" % json.dumpsCode(resp_data)
        self.send_response(200)
        self.send_header('Content-type', 'text/javascript')
        self.end_headers()
        self.wfile.write(resp_string)
        self.finish()

    # deliver the active_reload.js when the script url is requested
    # - this is interesting when the main app is run through different web server
    elif (self.ar_is_active() and self.path == AR_Script_Url):
        scriptfile = codecs.open(live_reload.lreload_script, "r", "utf-8")
        self.send_response(200)
        self.send_header('Content-type', 'text/javascript')
        self.end_headers()
        self.insert_ar_script(scriptfile, self.wfile)
        scriptfile.close()
        self.finish()

    # insert active_reload.js text into index.html
    # - this is interesting when serving the main app through this web server
    elif (self.ar_is_active() and self.path == live_reload.app_url):
        file_path = self.translate_path(self.path)
        indexfile = codecs.open(file_path, "r", "utf-8")
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        #indexfile = self.send_head() # sets Content-Length!
        out = self.wfile
        insert_before_tag = "</head>"
        # stream index.html, injecting the AR <script> tag just before </head>
        for line in indexfile:
            if insert_before_tag in line:
                before, after = line.split(insert_before_tag, 1)
                out.write(before)
                out.write(' <script type="text/javascript" ')
                out.write('src="%s%s">' % (live_reload.server_url, AR_Script_Url))
                out.write("</script>\n")
                out.write(insert_before_tag)
                out.write(after)
            else:
                out.write(line)
        indexfile.close()
        self.finish()

    # normal file serving
    else:
        CGIHTTPServer.CGIHTTPRequestHandler.do_GET(self)
def loaderUrisBefore(script, compConf):
    """Return a JSON list of extra script URIs configured via "add-script"."""
    extra_uris = [entry["uri"] for entry in self._job.get("add-script", [])]
    return json.dumpsCode(extra_uris)
def generateBootCode(parts, packages, boot, script, compConf, variants, settings, bootCode, globalCodes, version="source", decodeUrisFile=None, format=False):
    """Generate the boot/loader script by filling the loader template.

    Duplicate variant of generateBootCode: collects part/package/URI/hash
    information plus stringified `globalCodes`, substitutes them into the
    loader template, and returns the filled text ("" when there are no
    parts).  `version` is "source" or "build".
    """

    ##
    # create a map with part names as key and array of package id's and
    # return as string
    def partsMap(script):
        partData = {}
        packages = script.packagesSortedSimple()
        #print "packages: %r" % packages
        for part in script.parts:
            partData[part] = script.parts[part].packagesAsIndices(packages)
            #print "part '%s': %r" % (part, script.parts[part].packages)
        partData = json.dumpsCode(partData)
        return partData

    def fillTemplate(vals, template):
        # Fill the code template with various vals
        templ = MyTemplate(template)
        result = templ.safe_substitute(vals)
        return result

    def packageUrisToJS1(packages, version, namespace=None):
        # Translate URI data to JavaScript
        allUris = []
        for packageId, package in enumerate(packages):
            packageUris = []
            for fileId in package:
                if version == "build":
                    # TODO: gosh, the next is an ugly hack!
                    #namespace = self._resourceHandler._genobj._namespaces[0]  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                    if not namespace:
                        namespace = script.namespace  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                    relpath = OsPath(fileId)
                else:
                    namespace = self._classes[fileId]["namespace"]
                    relpath = OsPath(self._classes[fileId]["relpath"])
                shortUri = Uri(relpath.toUri())
                packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
            allUris.append(packageUris)
        return allUris

    ##
    # Translate URI data to JavaScript
    # using Package objects
    def packageUrisToJS(packages, version):
        allUris = []
        for packageId, package in enumerate(packages):
            packageUris = []
            if package.file:  # build
                namespace = "__out__"
                fileId = package.file
                relpath = OsPath(fileId)
                shortUri = Uri(relpath.toUri())
                packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
            else:  # "source" :
                for clazz in package.classes:
                    namespace = self._classes[clazz]["namespace"]
                    relpath = OsPath(self._classes[clazz]["relpath"])
                    shortUri = Uri(relpath.toUri())
                    packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
            allUris.append(packageUris)
        return allUris

    def loadTemplate(bootCode):
        # try custom loader templates
        loaderFile = compConf.get("paths/loader-template", None)
        if not loaderFile:
            # use default templates
            if version == "build":
                #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-build.tmpl.js")
                # TODO: test-wise using generic template
                loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
            else:
                #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-source.tmpl.js")
                loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
        template = filetool.read(loaderFile)
        return template

    # ---------------------------------------------------------------

    if not parts:
        return ""

    result = ""
    vals = {}
    packages = script.packagesSortedSimple()
    loader_with_boot = self._job.get("packages/loader-with-boot", True)

    # stringify data in globalCodes
    for entry in globalCodes:
        globalCodes[entry] = json.dumpsCode(globalCodes[entry])
        # undo damage done by simplejson to raw strings with escapes \\ -> \
        globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets tripple escaped, therefore the first .replace()

    vals.update(globalCodes)

    if version == "build":
        vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!
    vals["Boot"] = '"%s"' % boot
    if version == "build":
        vals["BootPart"] = bootCode
    else:
        vals["BootPart"] = ""
        # fake package data
        for key, package in enumerate(packages):
            vals["BootPart"] += "qx.$$packageData['%d']={};\n" % key

    # Translate part information to JavaScript
    vals["Parts"] = partsMap(script)

    # Translate URI data to JavaScript
    #vals["Uris"] = packageUrisToJS1(packages, version)
    vals["Uris"] = packageUrisToJS(packages, version)
    vals["Uris"] = json.dumpsCode(vals["Uris"])

    # Add potential extra scripts
    vals["UrisBefore"] = []
    if self._job.get("add-script", False):
        additional_scripts = self._job.get("add-script", [])
        for additional_script in additional_scripts:
            vals["UrisBefore"].append(additional_script["uri"])
    vals["UrisBefore"] = json.dumpsCode(vals["UrisBefore"])

    # Whether boot package is inline
    if version == "source":
        vals["BootIsInline"] = json.dumpsCode(False)
    else:
        vals["BootIsInline"] = json.dumpsCode(loader_with_boot)

    # Closure package information
    cParts = {}
    if version == "build":
        for part in script.parts:
            if not loader_with_boot or part != "boot":
                cParts[part] = True
    vals["ClosureParts"] = json.dumpsCode(cParts)

    # Package Hashes
    vals["PackageHashes"] = {}
    for key, package in enumerate(packages):
        if package.hash:
            vals["PackageHashes"][key] = package.hash
        else:
            vals["PackageHashes"][key] = "%d" % key  # fake code package hashes in source ver.
    vals["PackageHashes"] = json.dumpsCode(vals["PackageHashes"])

    # Script hook for qx.$$loader.decodeUris() function
    vals["DecodeUrisPlug"] = ""
    if decodeUrisFile:
        plugCode = filetool.read(self._config.absPath(decodeUrisFile))  # let it bomb if file can't be read
        vals["DecodeUrisPlug"] = plugCode.strip()

    # Enable "?nocache=...." for script loading?
    vals["NoCacheParam"] = "true" if self._job.get("compile-options/uris/add-nocache-param", True) else "false"

    # Locate and load loader basic script
    template = loadTemplate(bootCode)

    # Fill template gives result
    result = fillTemplate(vals, template)

    return result
def generateLoader(script, compConf, globalCodes, bootCode='', ):
    """Fill the loader template with part/package/i18n data for this script.

    Variant that can strip I18N info from the loader when it is shipped
    separately ("packages/i18n-with-boot" False, or a "build" where the
    packages already carry it).

    NOTE(review): `result` is assigned by loaderFillTemplate but this excerpt
    ends without returning it -- confirm whether a `return result` follows
    elsewhere or callers consume the value differently.
    """
    self._console.info("Generate loader script")
    result = ""
    vals = {}

    if not self._job.get("packages/i18n-with-boot", True):
        # remove I18N info from globalCodes, so they don't go into the loader
        globalCodes["Translations"] = {}
        globalCodes["Locales"] = {}
    else:
        if script.buildType == "build":
            # also remove them here, as this info is now with the packages
            globalCodes["Translations"] = {}
            globalCodes["Locales"] = {}

    # nothing to do for part-less builds
    if not script.parts:
        return result

    # stringify data in globalCodes
    for entry in globalCodes:
        globalCodes[entry] = json.dumpsCode(globalCodes[entry])
        # undo damage done by simplejson to raw strings with escapes \\ -> \
        globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets tripple escaped, therefore the first .replace()

    vals.update(globalCodes)

    if script.buildType == "build":
        vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!

    # Name of the boot part
    vals["Boot"] = loaderBootName(script, compConf)

    # Code (pot.) of the boot part
    vals["BootPart"] = loaderBootPart(script, compConf, bootCode)

    # Translate part information to JavaScript
    vals["Parts"] = loaderPartsMap(script, compConf)

    # Translate URI data to JavaScript
    #vals["Uris"] = loaderScriptUris(script, compConf)

    # Translate URI data to JavaScript
    vals["Packages"] = loaderPackages(script, compConf)

    # Add potential extra scripts
    vals["UrisBefore"] = loaderUrisBefore(script, compConf)

    # Add potential extra css
    vals["CssBefore"] = loaderCssBefore(script, compConf)

    # Whether boot package is inline
    vals["BootIsInline"] = loaderBootInline(script, compConf)

    # Closure package information
    vals["ClosureParts"] = loaderClosureParts(script, compConf)

    # Package Hashes
    #vals["PackageHashes"] = loaderPackageHashes(script, compConf)

    # Script hook for qx.$$loader.decodeUris() function
    vals["DecodeUrisPlug"] = loaderDecodeUrisPlug(script, compConf)

    # Enable "?nocache=...." for script loading?
    vals["NoCacheParam"] = loaderNocacheParam(script, compConf)

    # Locate and load loader template
    template, templatePath = loaderTemplate(script, compConf)

    # Fill template gives result
    try:
        result = loaderFillTemplate(vals, template)
    except KeyError, e:
        # template references a macro we did not provide a value for
        raise ValueError("Unknown macro used in loader template (%s): '%s'" % (templatePath, e.args[0]))
def do_GET(self):
    """Serve a GET request, with special handling for active-reload URLs.

    Dispatch order: favicon suppression, reload-check polling endpoint,
    reload-script delivery, index.html script injection, then normal
    CGI/file serving.  NOTE(review): relies on module-level names
    (Context, cgi, datetime, json, codecs, live_reload, AR_Check_Url,
    AR_Script_Url, CGIHTTPServer) defined elsewhere in the file.
    """
    # Mute error messages for favicon.ico requests
    if self.path == "/favicon.ico":
        self.send_response(404)
        self.finish()
    # Support for active reload
    # perform a check when the sentinel url is requested
    elif (self.ar_is_active() and self.path.startswith(AR_Check_Url)):
        console = Context.console
        # Get 'since' query parm; split the query string off self.path
        if self.path.find('?') != -1:
            self.path, self.query = self.path.split('?', 1)
        else:
            self.query = ''
        query_map = cgi.parse_qs(self.query)
        assert query_map["since"]
        # 'since' is a timestamp (float seconds) sent by the client poller
        since = float(query_map["since"][0])
        #ret = 200 if self.check_reload() else 304 # 304=not modified
        # Return Json data as a JSONP callback invocation
        resp_data = {"changed": False}
        if self.check_reload(since):
            resp_data["changed"] = True
            console.info("%s - Signalling reload" % (datetime.datetime.now(), ))
        resp_string = "qx_AR.script_callback(%s)" % json.dumpsCode( resp_data)
        self.send_response(200)
        self.send_header('Content-type', 'text/javascript')
        self.end_headers()
        self.wfile.write(resp_string)
        self.finish()
    # deliver the active_reload.js when the script url is requested
    # - this is interesting when the main app is run through different web server
    elif (self.ar_is_active() and self.path == AR_Script_Url):
        scriptfile = codecs.open(live_reload.lreload_script, "r", "utf-8")
        self.send_response(200)
        self.send_header('Content-type', 'text/javascript')
        self.end_headers()
        # insert_ar_script streams the script into the response body
        self.insert_ar_script(scriptfile, self.wfile)
        scriptfile.close()
        self.finish()
    # insert active_reload.js text into index.html
    # - this is interesting when serving the main app through this web server
    elif (self.ar_is_active() and self.path == live_reload.app_url):
        file_path = self.translate_path(self.path)
        indexfile = codecs.open(file_path, "r", "utf-8")
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        #indexfile = self.send_head() # sets Content-Length!
        out = self.wfile
        insert_before_tag = "</head>"
        # Copy index.html through, injecting a <script> tag for the
        # active-reload script just before </head>.
        # NOTE(review): assumes "</head>" appears at most once per line
        # and is not split across lines — confirm for served documents.
        for line in indexfile:
            if insert_before_tag in line:
                before, after = line.split(insert_before_tag, 1)
                out.write(before)
                out.write(' <script type="text/javascript" ')
                out.write('src="%s%s">' % (live_reload.server_url, AR_Script_Url))
                out.write("</script>\n")
                out.write(insert_before_tag)
                out.write(after)
            else:
                out.write(line)
        indexfile.close()
        self.finish()
    # normal file serving
    else:
        CGIHTTPServer.CGIHTTPRequestHandler.do_GET(self)
def loaderCssBefore(script, compConf): cssBefore = [] additional_csses = self._job.get("add-css",[]) for additional_css in additional_csses: cssBefore.append(additional_css["uri"]) return json.dumpsCode(cssBefore)
def runImageCombining(jobconf, confObj):
    """Combine source images into sprite images per the "combine-images" job key.

    For each configured output image, clips the input images together
    (either into a real image file or a base64 JSON map) and writes a
    companion ".meta" file describing each clipped region.

    :param jobconf: job configuration object (provides .get())
    :param confObj: config object used to resolve absolute paths (.absPath())
    :raises ValueError: if a file glob in the input spec matches nothing
    """

    ##
    # Split a prefix spec [prefix, altprefix] into its two components,
    # warning (optionally) on a missing/malformed spec.
    def extractFromPrefixSpec(prefixSpec):
        prefix = altprefix = ""
        if not prefixSpec or not isinstance(prefixSpec, types.ListType):
            if jobconf.get("config-warnings/combine-images", True):
                console.warn("Missing or incorrect prefix spec, might lead to incorrect resource id's.")
        elif len(prefixSpec) == 2 :  # prefixSpec = [ prefix, altprefix ]
            prefix, altprefix = prefixSpec
        elif len(prefixSpec) == 1:
            prefix = prefixSpec[0]
            altprefix = ""
        return prefix, altprefix

    ##
    # strip prefix - if available - from imagePath, and replace by altprefix
    def getImageId(imagePath, prefixSpec):
        prefix, altprefix = extractFromPrefixSpec(prefixSpec)
        imageId = imagePath  # init
        _, imageId, _ = Path.getCommonPrefix(imagePath, prefix)  # assume: imagePath = prefix "/" imageId
        if altprefix:
            imageId = altprefix + "/" + imageId
        imageId = Path.posifyPath(imageId)
        return imageId

    ##
    # create a dict with the clipped image file path as key, and prefix elements as value
    def getClippedImagesDict(imageSpec):
        imgDict = {}
        inputStruct = imageSpec['input']
        for group in inputStruct:
            prefixSpec = group.get('prefix', [])
            prefix, altprefix = extractFromPrefixSpec(prefixSpec)
            if prefix:
                prefix = confObj.absPath(prefix)
            for filepatt in group['files']:
                num_files = 0
                # resolve file globs - TODO: can be removed in generator.action.ImageClipping
                for file in glob.glob(confObj.absPath(filepatt)):
                    console.debug("adding image %s" % file)
                    imgDict[file] = [prefix, altprefix]
                    num_files += 1
                if num_files == 0:
                    raise ValueError("Non-existing file spec: %s" % filepatt)
        return imgDict

    # ----------------------------------------------------------------------

    if not jobconf.get("combine-images", False):
        return

    console = Context.console
    cache = Context.cache

    console.info("Combining images...")
    console.indent()
    imageClipper = ImageClipping(console, cache, jobconf)

    images = jobconf.get("combine-images/images", {})
    for image, imgspec in images.iteritems():
        console.info("Creating image %s" % image)
        console.indent()
        imageId = getImageId(image, imgspec.get('prefix', []))
        image = confObj.absPath(image)  # abs output path
        config = {}

        # create a dict of clipped image objects - for later look-up
        clippedImages = getClippedImagesDict(imgspec)

        # collect list of all input files, no matter where they come from
        input = sorted(clippedImages.keys())

        # collect layout property (default: horizontal)
        # FIX: was an always-True `"horizontal" == "horizontal"` in the
        # else branch; `dict.get` with the default collapses both cases.
        layout = imgspec.get('layout', "horizontal") == "horizontal"

        # get type of combined image (png, base64, ...)
        combtype = "base64" if image.endswith(".b64.json") else "extension"

        # create the combined image
        subconfigs = imageClipper.combine(image, input, layout, combtype)

        # for the meta information, go through the list of returned subconfigs (one per clipped image)
        for sub in subconfigs:
            x = Image()
            x.combId, x.left, x.top, x.width, x.height, x.format = (
                imageId, sub['left'], sub['top'], sub['width'], sub['height'], sub['type'])
            subId = getImageId(sub['file'], clippedImages[sub['file']])
            config[subId] = x.toMeta()

        # store meta data for this combined image, next to the image file
        bname = os.path.basename(image)
        ri = bname.rfind('.')
        if ri > -1:
            bname = bname[:ri]
        bname += '.meta'
        meta_fname = os.path.join(os.path.dirname(image), bname)
        console.debug("writing meta file %s" % meta_fname)
        filetool.save(meta_fname, json.dumps(config, ensure_ascii=False, sort_keys=True))
        console.outdent()

        # handle base64 type, need to write "combined image" to file
        if combtype == "base64":
            combinedMap = {}
            for sub in subconfigs:
                subMap = {}
                subId = getImageId(sub['file'], clippedImages[sub['file']])
                subMap['width'] = sub['width']
                subMap['height'] = sub['height']
                subMap['type'] = sub['type']
                subMap['encoding'] = sub['encoding']
                subMap['data'] = sub['data']
                combinedMap[subId] = subMap
            filetool.save(image, json.dumpsCode(combinedMap))

    console.outdent()
    return
def loaderPackageHashes(script, compConf): packageHashes = {} for pos, package in enumerate(script.packagesSorted()): packageHashes[pos] = "%d" % package.id return json.dumpsCode(packageHashes)