Example #1
    def depsToFlareFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]['name'] = classId
                data[classId]["size"] = 1000
                data[classId]["imports"] = []

            if loadOrRun == 'load':
                data[classId]['imports'].append(depId)

        output = []
        for cid in data.keys():
            output.append(data[cid])

        file = depsLogConf.get('flare/file', "flare.json")
        console.info("Writing dependency data to file: %s" % file)
        pretty = depsLogConf.get('flare/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(
            file,
            json.dumps(output,
                       sort_keys=True,
                       indent=indent,
                       separators=separators))

        return
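
For orientation, a minimal sketch of the flare-style records this function serializes; the class and dependency names here are made up:

import json
# hypothetical input: one class "app.Main" that load-depends on "qx.core.Object"
data = {"app.Main": {"name": "app.Main", "size": 1000, "imports": ["qx.core.Object"]}}
output = list(data.values())
print(json.dumps(output, sort_keys=True, indent=2, separators=(', ', ': ')))
# -> a JSON array with one {"imports": ..., "name": ..., "size": 1000} record per class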
Example #2
def runFix(jobconf, classesObj):

    def fixPng():
        return

    def removeBOM(fpath):
        content = open(fpath, "rb").read()
        if content.startswith(codecs.BOM_UTF8):
            console.debug("removing BOM: %s" % fpath)
            open(fpath, "wb").write(content[len(codecs.BOM_UTF8):])
        return

    # - Main ---------------------------------------------------------------

    if not isinstance(jobconf.get("fix-files", False), types.DictType):
        return

    console = Context.console
    classes = classesObj.keys()
    fixsettings = ExtMap(jobconf.get("fix-files"))

    # Fixing JS source files
    console.info("Fixing whitespace in source files...")
    console.indent()

    console.info("Fixing files: ", False)
    numClasses = len(classes)
    eolStyle = fixsettings.get("eol-style", "LF")
    tabWidth = fixsettings.get("tab-width", 2)
    for pos, classId in enumerate(classes):
        console.progress(pos+1, numClasses)
        classEntry   = classesObj[classId]
        filePath     = classEntry.path
        fileEncoding = classEntry.encoding
        fileContent  = filetool.read(filePath, fileEncoding)
        # Caveat: as filetool.read already calls any2Unix, converting to LF will
        # not work as the file content appears unchanged to this function
        if eolStyle == "CR":
            fixedContent = textutil.any2Mac(fileContent)
        elif eolStyle == "CRLF":
            fixedContent = textutil.any2Dos(fileContent)
        else:
            fixedContent = textutil.any2Unix(fileContent)
        fixedContent = textutil.normalizeWhiteSpace(textutil.removeTrailingSpaces(textutil.tab2Space(fixedContent, tabWidth)))
        if fixedContent != fileContent:
            console.debug("modifying file: %s" % filePath)
            filetool.save(filePath, fixedContent, fileEncoding)
        # this has to go separate, as it requires binary operation
        removeBOM(filePath)

    console.outdent()

    # Fixing PNG files -- currently just a stub!
    if fixsettings.get("fix-png", False):
        console.info("Fixing PNGs...")
        console.indent()
        fixPng()
        console.outdent()

    return
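
The BOM handling in removeBOM works because codecs.BOM_UTF8 is an ordinary byte string; the check in isolation:

import codecs
content = codecs.BOM_UTF8 + b"var x = 1;"      # simulated file content with a UTF-8 BOM
if content.startswith(codecs.BOM_UTF8):
    content = content[len(codecs.BOM_UTF8):]   # drop the three BOM bytes
print(content)                                 # the bare source, BOM removed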
Example #3
def migrateFile(filePath, compiledPatches, compiledInfos, patchFile, options=None, encoding="UTF-8"):

    logging.info("  - File: %s" % filePath)

    # Read in original content
    fileContent = filetool.read(filePath, encoding)

    fileId = extractFileContentId(fileContent)

    # Apply patches
    patchedContent = fileContent

    if patchFile and fileId is not None:

        # import patch
        patch = {}
        execfile(patchFile, patch)
        tree = treegenerator.createFileTree(tokenizer.Tokenizer().parseStream(fileContent))

        # If there were any changes, compile the result
        if patch["patch"](fileId, tree):
            options.prettyPrint = True  # make sure it's set
            result = [u""]
            # result = pretty.prettyNode(tree, options, result)
            result = formatter_.formatNode(tree, options, result)
            patchedContent = u"".join(result)

    # apply RE patches
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Write file
    if patchedContent != fileContent:
        logging.info("    - %s has been modified. Storing modifications ..." % filePath)
        filetool.save(filePath, patchedContent, encoding)
Example #4
def migrateFile(
                filePath, compiledPatches, compiledInfos,
                hasPatchModule=False, options=None, encoding="UTF-8"):

    logging.info("  - File: %s" % filePath)

    # Read in original content
    fileContent = filetool.read(filePath, encoding)

    fileId = extractFileContentId(fileContent)

    # Apply patches
    patchedContent = fileContent

    if hasPatchModule and fileId is not None:

        import patch
        tree = treegenerator.createSyntaxTree(tokenizer.parseStream(fileContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
            options.prettyPrint = True  # make sure it's set
            result = [u'']
            result = pretty.prettyNode(tree, options, result)
            patchedContent = u''.join(result)

    # apply RE patches
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Write file
    if patchedContent != fileContent:
        logging.info("    - %s has been modified. Storing modifications ..." % filePath)
        filetool.save(filePath, patchedContent, encoding)
Example #5
    def depsToFlareFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]['name'] = classId
                data[classId]["size"] = 1000
                data[classId]["imports"] = []

            if loadOrRun == 'load':
                data[classId]['imports'].append(depId)

        output = []
        for cid in data.keys():
            output.append(data[cid])

        file = depsLogConf.get('flare/file', "flare.json")
        console.info("Writing dependency data to file: %s" % file)
        pretty = depsLogConf.get('flare/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(file, json.dumps(output, sort_keys=True, indent=indent, separators=separators))

        return
Example #6
def _handleResources(script, generator, filtered=True):

    def createResourceInfo(res, resval):
        resinfo = [ { "target": "resource", "data": { res : resval }} ]
        #filetool.save(approot+"/data/resource/" + res + ".json", json.dumpsCode(resinfo))
        return resinfo

    def copyResource(res, library):
        sourcepath = os.path.join(library._resourcePath, res)
        targetpath = approot + "/resource/" + res
        filetool.directory(os.path.dirname(targetpath))
        shutil.copy(sourcepath, targetpath)
        return

    # ----------------------------------------------------------------------
    context.console.info("Processing resources: ", False)
    approot = context.jobconf.get("provider/app-root", "./provider")
    filetool.directory(approot+"/data")
    filetool.directory(approot+"/resource")
    
    # quick copy of runLogResources, for fast results
    packages   = script.packagesSorted()
    parts      = script.parts
    variants   = script.variants

    allresources = {}
    if filtered:
        # -- the next call is fake, just to populate package.data.resources!
        _ = generator._codeGenerator.generateResourceInfoCode(script, generator._settings, context.jobconf.get("library",[]))
        for packageId, package in enumerate(packages):
            allresources.update(package.data.resources)
    else:
        # get the main library
        mainlib = [x for x in script.libraries if x.namespace == script.namespace][0]
        reslist = mainlib.getResources()
        allresources = ResourceHandler.createResourceStruct(reslist, updateOnlyExistingSprites = False)

    # get resource info
    resinfos = {}
    numResources = len(allresources)
    for num,res in enumerate(allresources):
        context.console.progress(num+1, numResources)
        # fake a classId-like resourceId ("a.b.c"), for filter matching
        resId = os.path.splitext(res)[0]
        resId = resId.replace("/", ".")
        if filtered and not passesOutputfilter(resId):
            continue
        resinfos[res] = createResourceInfo(res, allresources[res])
        # extract library name space
        if isinstance(allresources[res], types.ListType): # it's an image = [14, 14, u'png', u'qx' [, u'qx/decoration/Modern/checkradio-combined.png', 0, 0]]
            library_ns = allresources[res][3]
        else: # html page etc. = "qx"
            library_ns = allresources[res]
        if library_ns:  # library_ns == '' means embedded image -> no copying
            library    = libraries[library_ns]
            copyResource(res, library)

    filetool.save(approot+"/data/resource/resources.json", json.dumpsCode(resinfos))

    return
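
The classId-like resourceId built above is plain path rewriting; with a made-up resource path:

import os
res = "qx/decoration/Modern/checkradio-combined.png"    # hypothetical resource path
resId = os.path.splitext(res)[0].replace("/", ".")
print(resId)   # qx.decoration.Modern.checkradio-combined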
Example #7
    def depsToJsonFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]["load"] = []
                data[classId]["run"] = []

            data[classId][loadOrRun].append(depId)

        file = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(
            file,
            json.dumps(data,
                       sort_keys=True,
                       indent=indent,
                       separators=separators))

        return
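
The pretty/compact switch above only varies json.dumps keyword arguments; both settings side by side on toy data:

import json
data = {"my.Class": {"load": ["a.B"], "run": []}}
print(json.dumps(data, sort_keys=True, indent=None, separators=(',', ':')))
# {"my.Class":{"load":["a.B"],"run":[]}}
print(json.dumps(data, sort_keys=True, indent=2, separators=(', ', ': ')))
# same data, spread over indented lines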
Example #8
def migrateFile(
                filePath, compiledPatches, compiledInfos,
                hasPatchModule=False, options=None, encoding="UTF-8"):

    logging.info("  - File: %s" % filePath)

    # Read in original content
    fileContent = filetool.read(filePath, encoding)

    fileId = extractFileContentId(fileContent)

    # Apply patches
    patchedContent = fileContent

    if hasPatchModule and fileId is not None:

        import patch
        tree = treegenerator.createFileTree(tokenizer.parseStream(fileContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
            options.prettyPrint = True  # make sure it's set
            result = [u'']
            result = pretty.prettyNode(tree, options, result)
            patchedContent = u''.join(result)

    # apply RE patches
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Write file
    if patchedContent != fileContent:
        logging.info("    - %s has been modified. Storing modifications ..." % filePath)
        filetool.save(filePath, patchedContent, encoding)
Example #9
def generateHttpdConfig(jobconf, confObj):
    console = Context.console
    # read config
    jconf_app_namespace = jobconf.get("let/APPLICATION")
    assert jconf_app_namespace
    jconf_conf_dir = jobconf.get("web-server-config/output-dir", ".")
    jconf_conf_dir = confObj.absPath(jconf_conf_dir)
    jconf_template_dir = jobconf.get("web-server-config/template-dir")
    assert jconf_template_dir
    jconf_httpd_type = jobconf.get("web-server-config/httpd-type", "apache2")
    jconf_httpd_hosturl = jobconf.get("web-server-config/httpd-host-url",
                                      "http://localhost")

    libs = jobconf.get("library", [])
    for lib in libs:
        lib._init_from_manifest()

    config_path = os.path.join(jconf_conf_dir, jconf_httpd_type + ".conf")
    template_path = os.path.join(jconf_template_dir,
                                 "httpd." + jconf_httpd_type + ".tmpl.conf")
    alias_path = jconf_app_namespace.replace(".", "/")

    # collect config values
    value_map = {
        "APP_HTTPD_CONFIG": "",
        "LOCALHOST_APP_URL": "",
        "APP_NAMESPACE_AS_PATH": "",
        "APP_DOCUMENT_ROOT": "",
    }

    value_map['APP_HTTPD_CONFIG'] = config_path

    doc_root = jobconf.get("web-server-server/document-root",
                           "") or get_doc_root(jobconf, confObj)
    doc_root = os.path.normpath(
        confObj.absPath(doc_root))  # important to normpath() coz '\' vs. '/'
    value_map['APP_DOCUMENT_ROOT'] = ensure_trailing_slash(doc_root)

    app_web_path = from_doc_root_to_app_root(jobconf, confObj, doc_root)
    value_map['LOCALHOST_APP_URL'] = "/".join(
        (jconf_httpd_hosturl, alias_path, app_web_path))

    value_map['APP_NAMESPACE_AS_PATH'] = alias_path

    # load httpd-specific template
    config_templ = filetool.read(template_path)
    # replace macros
    config_templ = string.Template(config_templ)
    config = config_templ.safe_substitute(value_map)
    # write .conf file
    console.info("Writing configuration file for '%s': '%s'" %
                 (jconf_httpd_type, config_path))
    filetool.save(config_path, config)
    console.info(
        "See the file's comments on how to integrate it with the web server configuration"
    )
    console.info("Then open your source application with '%s'" %
                 value_map['LOCALHOST_APP_URL'])
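
string.Template.safe_substitute, used above to fill the httpd template, replaces known ${...} macros and leaves unknown ones intact instead of raising; a sketch with invented template text:

import string
templ = string.Template("Alias /${APP_NAMESPACE_AS_PATH} ${APP_DOCUMENT_ROOT}\n# ${UNKNOWN} survives untouched")
print(templ.safe_substitute({
    "APP_NAMESPACE_AS_PATH": "myapp",
    "APP_DOCUMENT_ROOT": "/var/www/myapp/",
}))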
Example #10
def runFontMap(jobconf, confObj):

    if not jobconf.get("font-map", False):
        return

    console = Context.console
    cache = Context.cache

    # Test for fontforge
    try:
        import fontforge
    except ImportError:
        console.error(
            "Font map generation is not possible: fontforge is missing")
        return

    console.info("Generating font map...")
    console.indent()

    done = []

    fonts = jobconf.get("font-map/fonts", {})
    for font, fontspec in fonts.iteritems():
        alias = fontspec["alias"] or font.fontfamily

        if alias in done:
            continue
        done.append(alias)

        config = {
            "alias": alias or font.fontfamily,
            "size": fontspec["size"] or 48,
            "mapping": {}
        }

        fo = fontforge.open(font)

        for glyph in fo:
            go = fo[glyph]
            if go.unicode > 0:
                config["mapping"][go.glyphname] = [
                    go.unicode,
                    round(go.width / float(go.vwidth), 3)
                ]

        # store meta data for this font
        bname = os.path.basename(font)
        ri = bname.rfind('.')
        if ri > -1:
            bname = bname[:ri]
        bname += '.meta'
        meta_fname = os.path.join(os.path.dirname(font), bname)
        console.debug("writing meta file %s" % meta_fname)
        filetool.save(meta_fname,
                      json.dumps(config, ensure_ascii=False, sort_keys=True))

    console.outdent()
    return
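
The basename/rfind steps above swap the font file's extension for .meta; for ordinary file names os.path.splitext achieves the same (the path is hypothetical):

import os
font = "/fonts/FontAwesome.ttf"
meta_fname = os.path.splitext(font)[0] + ".meta"
print(meta_fname)   # /fonts/FontAwesome.meta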
Example #11
def _handleI18N(script, generator):
    context.console.info("Processing localisation data")
    context.console.indent()
    approot = context.jobconf.get("provider/app-root", "./provider")

    # get class projection
    class_list = []
    needs_cldr = False
    for classObj in script.classesObj:
        if passesOutputfilter(classObj.id):
            class_list.append(classObj.id)
            if not needs_cldr and classObj.getHints('cldr'):
                needs_cldr = True

    # get i18n data
    context.console.info("Getting translations")
    trans_dat = generator._locale.getTranslationData(class_list, script.variants, script.locales, 
                                                       addUntranslatedEntries=True)
    loc_dat   = None
    if needs_cldr:
        context.console.info("Getting CLDR data")
        loc_dat   = generator._locale.getLocalizationData(class_list, script.locales)


    # write translation and cldr files
    context.console.info("Writing localisation files: ", False)
    numTrans = len(trans_dat)
    for num,lang in enumerate(trans_dat):
        context.console.progress(num+1, numTrans)

        # translations
        transmap  = {}
        filename = "i18n-" + lang
        targetname = "i18n-" + lang
        translations = trans_dat[lang]
        for key in translations:
            if translations[key]:
                transmap[key] = [ { "target" : targetname, "data" : { key : translations[key] }} ]
            else:
                transmap[key] = [ ]
        filetool.save(approot+"/data/translation/"+filename+".json", json.dumpsCode(transmap))
        
        # cldr
        localemap = {}
        filename = "locale-" + lang
        targetname = "locale-" + lang
        if loc_dat:
            # sample: { "cldr" : [ { "target" : "locale-en", "data" : {"alternativeQuotationEnd":'"', "cldr_am": "AM",...}} ]}
            localekeys = loc_dat[lang]
            cldr_entry = [ { "target" : targetname, "data" : { }} ]
            for key in localekeys:
                if localekeys[key]:
                    cldr_entry[0]['data'][key] = localekeys[key]
            localemap['cldr'] = cldr_entry
            filetool.save(approot+"/data/locale/"+filename+".json", json.dumpsCode(localemap))

    context.console.outdent()
    return
Example #12
def runFontMap(jobconf, confObj):

    if not jobconf.get("font-map", False):
        return

    console = Context.console
    cache = Context.cache

    # Test for fontforge
    try:
        import fontforge
    except ImportError:
        console.error("Font map generation is not possible: fontforge is missing")
        return

    console.info("Generating font map...")
    console.indent()

    done = []

    fonts = jobconf.get("font-map/fonts", {})
    for font, fontspec in fonts.iteritems():
        alias = fontspec["alias"] or font.fontfamily

        if alias in done:
            continue
        done.append(alias)

        config = {
          "alias" : alias or font.fontfamily,
          "size" : fontspec["size"] or 48,
          "mapping" : {}
        }

        fo = fontforge.open(font)

        for glyph in fo:
            go = fo[glyph]
            if go.unicode > 0:
                config["mapping"][go.glyphname] = [go.unicode, round(go.width / float(go.vwidth), 3)]

        # store meta data for this font
        bname = os.path.basename(font)
        ri = bname.rfind('.')
        if ri > -1:
            bname = bname[:ri]
        bname += '.meta'
        meta_fname = os.path.join(os.path.dirname(font), bname)
        console.debug("writing meta file %s" % meta_fname)
        filetool.save(meta_fname, json.dumps(config, ensure_ascii=False, sort_keys=True))

    console.outdent()
    return
Example #13
    def runPrettyPrinting(self, classes, classesObj):
        "Gather all relevant config settings and pass them to the compiler"

        if not isinstance(self._job.get("pretty-print", False),
                          types.DictType):
            return

        self._console.info("Pretty-printing code...")
        self._console.indent()
        ppsettings = ExtMap(self._job.get(
            "pretty-print"))  # get the pretty-print config settings

        # init options
        parser = optparse.OptionParser()
        compiler.addCommandLineOptions(parser)
        (options, args) = parser.parse_args([])

        # modify according to config
        setattr(options, 'prettyPrint', True)  # turn on pretty-printing
        if ppsettings.get('general/indent-string', False):
            setattr(options, 'prettypIndentString',
                    ppsettings.get('general/indent-string'))
        if ppsettings.get('comments/trailing/keep-column', False):
            setattr(options, 'prettypCommentsTrailingKeepColumn',
                    ppsettings.get('comments/trailing/keep-column'))
        if ppsettings.get('comments/trailing/comment-cols', False):
            setattr(options, 'prettypCommentsTrailingCommentCols',
                    ppsettings.get('comments/trailing/comment-cols'))
        if ppsettings.get('comments/trailing/padding', False):
            setattr(options, 'prettypCommentsInlinePadding',
                    ppsettings.get('comments/trailing/padding'))
        if ppsettings.get('blocks/align-with-curlies', False):
            setattr(options, 'prettypAlignBlockWithCurlies',
                    ppsettings.get('blocks/align-with-curlies'))
        if ppsettings.get('blocks/open-curly/newline-before', False):
            setattr(options, 'prettypOpenCurlyNewlineBefore',
                    ppsettings.get('blocks/open-curly/newline-before'))
        if ppsettings.get('blocks/open-curly/indent-before', False):
            setattr(options, 'prettypOpenCurlyIndentBefore',
                    ppsettings.get('blocks/open-curly/indent-before'))

        self._console.info("Pretty-printing files: ", False)
        numClasses = len(classes)
        for pos, classId in enumerate(classes):
            self._console.progress(pos + 1, numClasses)
            #tree = treeLoader.getTree(classId)
            tree = classesObj[classId].tree()
            compiled = compiler.compile(tree, options)
            filetool.save(self._classes[classId]['path'], compiled)

        self._console.outdent()

        return
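
parser.parse_args([]) with an empty argv is a common way to get an options object holding nothing but defaults, which setattr then adjusts; in isolation, with a made-up option standing in for whatever compiler.addCommandLineOptions registers:

import optparse
parser = optparse.OptionParser()
parser.add_option("--pretty-print", dest="prettyPrint", action="store_true", default=False)
(options, args) = parser.parse_args([])        # empty argv -> pure defaults
setattr(options, "prettypIndentString", "  ")  # extra attributes can be attached freely
print(options.prettyPrint)                     # False (the default)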
Example #14
    def writePackages1(self, compiledPackages, script, startId=0):
        for content, resolvedFilePath in zip(compiledPackages, self.packagesFileNames(script.baseScriptPath, len(compiledPackages))):
            # Save result file
            filetool.save(resolvedFilePath, content)

            if script.scriptCompress:
                filetool.gzip(resolvedFilePath, content)

            self._console.debug("Done: %s" % self._computeContentSize(content))
            self._console.debug("")

        return
Example #15
def main():
    apidata = {}
    apidata['type'] = 'doctree'
    apidata['children'] = []
    apidata['attributes'] = {}
    apidata['children'].append({
        "type": "packages",
        "attributes": {},
        "children": []
    })
    filetool.directory(store_path)

    dirwalker = filetool.find(module_root, r'\.py$')

    for pyfile in dirwalker:
        #if os.stat(pyfile).st_size == 0:
        #    continue
        # get the file's api doc as json
        filejson = pyapi2json(pyfile)
        apipackage = file2package(pyfile, module_root)
        # and store it
        filetool.save(store_path + '/' + apipackage + '.json', filejson)
        # make an entry in apidata struct
        levels = apipackage.split('.')
        curr = apidata['children'][0]['children']
        for pos, level in enumerate(levels):
            if level not in (x['attributes']['name'] for x in curr
                             if 'name' in x['attributes']):
                newentry = {
                    "children": [],
                    "type": "packages" if pos % 2 else "package",
                    "attributes": {
                        "packageName": ".".join(levels[:pos]),
                        "name": level,
                        "fullName": ".".join(levels[:pos + 1])
                    }
                }
                if pos == len(levels) - 1:
                    newentry["externalRef"] = True
                    #del newentry['children']
                    #newentry["type"] = "classes"
                    pass
                curr.append(newentry)
                curr = newentry['children']
            else:
                curr = [
                    x['children'] for x in curr
                    if x['attributes']['name'] == level
                ][0]

    # store apidata
    filetool.save(store_path + '/' + "apidata.json", json.dumps(apidata))
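
For a single made-up package path, a trimmed-down run of the tree-building loop from main() above (the else-branch that descends into an already-existing entry never triggers here, since the tree starts empty):

import json
apipackage = "generator.action"    # hypothetical module package
apidata = {"children": [{"children": []}]}
levels = apipackage.split(".")
curr = apidata["children"][0]["children"]
for pos, level in enumerate(levels):
    newentry = {
        "children": [],
        "type": "packages" if pos % 2 else "package",
        "attributes": {
            "packageName": ".".join(levels[:pos]),
            "name": level,
            "fullName": ".".join(levels[:pos + 1]),
        },
    }
    if pos == len(levels) - 1:
        newentry["externalRef"] = True   # leaf entry refers to the stored .json file
    curr.append(newentry)
    curr = newentry["children"]
print(json.dumps(apidata, indent=2))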
Example #16
        def writePackage(content, packageId=""):
            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings, packageId)

            # Save result file
            filetool.save(resolvedFilePath, content)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, content)

            self._console.debug("Done: %s" % self._computeContentSize(content))
            self._console.debug("")

            return
Example #17
def generateHttpdConfig(jobconf, confObj):
    console = Context.console
    # read config
    jconf_app_namespace = jobconf.get("let/APPLICATION")
    assert jconf_app_namespace
    jconf_conf_dir = jobconf.get("web-server-config/output-dir", ".")
    jconf_conf_dir = confObj.absPath(jconf_conf_dir)
    jconf_template_dir = jobconf.get("web-server-config/template-dir")
    assert jconf_template_dir
    jconf_httpd_type = jobconf.get("web-server-config/httpd-type", "apache2")
    jconf_httpd_hosturl = jobconf.get("web-server-config/httpd-host-url", "http://localhost")

    libs = jobconf.get("library", [])
    assert libs
    for lib in libs:
        lib._init_from_manifest()

    config_path = os.path.join(jconf_conf_dir, jconf_httpd_type + ".conf")
    template_path = os.path.join(jconf_template_dir, "httpd." + jconf_httpd_type + ".tmpl.conf")
    alias_path = jconf_app_namespace.replace(".", "/")

    # collect config values
    value_map = {
        "APP_HTTPD_CONFIG"      : "",
        "LOCALHOST_APP_URL"     : "",
        "APP_NAMESPACE_AS_PATH" : "",
        "APP_DOCUMENT_ROOT"     : "",
    }

    value_map['APP_HTTPD_CONFIG'] = config_path

    doc_root = get_doc_root(jobconf, confObj)
    value_map['APP_DOCUMENT_ROOT'] = ensure_trailing_slash(doc_root)

    app_web_path = from_doc_root_to_app_root(jobconf, confObj, doc_root)
    value_map['LOCALHOST_APP_URL'] = "/".join((jconf_httpd_hosturl, alias_path, app_web_path))

    value_map['APP_NAMESPACE_AS_PATH'] = alias_path

    # load httpd-specific template
    config_templ = filetool.read(template_path)
    # replace macros
    config_templ = string.Template(config_templ)
    config = config_templ.safe_substitute(value_map)
    # write .conf file
    console.info("Writing configuration file for '%s': '%s'" % (jconf_httpd_type, config_path))
    filetool.save(config_path, config)
    console.info("See the file's comments on how to integrate it with the web server configuration")
    console.info("Then open your source application with '%s'" % value_map['LOCALHOST_APP_URL'])
Example #18
def _handleResources(script, generator):

    def createResourceInfo(res, resval):
        resinfo = [ { "target": "resource", "data": { res : resval }} ]
        #filetool.save(approot+"/data/resource/" + res + ".json", json.dumpsCode(resinfo))
        return resinfo

    def copyResource(res, library):
        sourcepath = os.path.join(library['path'], library['resource'], res)
        targetpath = approot + "/resource/" + res
        filetool.directory(os.path.dirname(targetpath))
        shutil.copy(sourcepath, targetpath)
        return

    # ----------------------------------------------------------------------
    approot = context.jobconf.get("provider/app-root", "./provider")
    filetool.directory(approot+"/data")
    filetool.directory(approot+"/resource")
    
    # quick copy of runLogResources, for fast results
    packages   = script.packagesSortedSimple()
    parts      = script.parts
    variants   = script.variants

    allresources = {}
    # get resource info
    # -- the next call is fake, just to populate package.data.resources!
    _ = generator._codeGenerator.generateResourceInfoCode(script, generator._settings, context.jobconf.get("library",[]))
    for packageId, package in enumerate(packages):
        allresources.update(package.data.resources)
    
    resinfos = {}
    for res in allresources:
        # fake a classId-like resourceId ("a.b.c"), for filter matching
        resId = os.path.splitext(res)[0]
        resId = resId.replace("/", ".")
        if passesOutputfilter(resId):
            resinfos[res] = createResourceInfo(res, allresources[res])
            # extract library name space
            if isinstance(allresources[res], types.ListType): # it's an image = [14, 14, u'png', u'qx' [, u'qx/decoration/Modern/checkradio-combined.png', 0, 0]]
                library_ns = allresources[res][3]
            else: # html page etc. = "qx"
                library_ns = allresources[res]
            library    = libraries[library_ns]
            copyResource(res, library)

    filetool.save(approot+"/data/resource/resources.json", json.dumpsCode(resinfos))

    return
Example #19
def main():
    apidata = {}
    apidata['type'] = 'doctree'
    apidata['children'] = []
    apidata['attributes'] = {}
    apidata['children'].append({
      "type":"packages","attributes":{},"children":[]  
    })
    filetool.directory(store_path)

    dirwalker = filetool.find(module_root, r'\.py$')

    for pyfile in dirwalker:
        #if os.stat(pyfile).st_size == 0:
        #    continue
        # get the file's api doc as json
        filejson = pyapi2json(pyfile)
        apipackage = file2package(pyfile, module_root)
        # and store it
        filetool.save(store_path+'/'+apipackage+'.json', filejson)
        # make an entry in apidata struct
        levels = apipackage.split('.')
        curr = apidata['children'][0]['children']
        for pos,level in enumerate(levels):
            if level not in (x['attributes']['name'] for x in curr if 'name' in x['attributes']):
                newentry = {
                    "children" : [],
                    "type" : "packages" if pos % 2 else "package",
                    "attributes" : {
                        "packageName" : ".".join(levels[:pos]),
                        "name" : level,
                        "fullName" : ".".join(levels[:pos+1])
                    }
                }
                if pos==len(levels)-1:
                    newentry["externalRef"] = True
                    #del newentry['children']
                    #newentry["type"] = "classes"
                    pass
                curr.append(newentry)
                curr = newentry['children']
            else:
                curr = [x['children'] for x in curr if x['attributes']['name']==level][0]
        

    # store apidata
    filetool.save(store_path+'/'+"apidata.json", json.dumps(apidata))
Example #20
def _handleI18N(script, generator):
    approot = context.jobconf.get("provider/app-root", "./provider")

    # get class projection
    class_list = []
    needs_cldr = False
    for classObj in script.classesObj:
        if passesOutputfilter(classObj.id):
            class_list.append(classObj.id)
            if not needs_cldr and classObj.getMeta('cldr'):
                needs_cldr = True

    # get i18n data
    trans_dat = generator._locale.getTranslationData_1(class_list, script.variants, script.locales, 
                                                       addUntranslatedEntries=True)
    loc_dat   = None
    if needs_cldr:
        loc_dat   = generator._locale.getLocalizationData(class_list, script.locales)


    # write translation and cldr files
    for lang in trans_dat:
        filename = "i18n-" + lang

        # translations
        transmap  = {}
        translations = trans_dat[lang]
        for key in translations:
            if translations[key]:
                transmap[key] = [ { "target" : "i18n", "data" : { key : translations[key] }} ]
            else:
                transmap[key] = [ ]
        filetool.save(approot+"/data/translation/"+filename+".json", json.dumpsCode(transmap))
        
        # cldr
        localemap = {}
        if loc_dat:
            localekeys = loc_dat[lang]
            for key in localekeys:
                if localekeys[key]:
                    localemap[key] = [ { "target" : "i18n", "data" : { key : localekeys[key] }} ]
                else:
                    localemap[key] = [ ]
            filetool.save(approot+"/data/locale/"+filename+".json", json.dumpsCode(localemap))

    return
Example #21
def _handleCode(script, generator):

    approot = context.jobconf.get("provider/app-root", "./provider")
    builds = context.jobconf.get("provider/compile", ["source"])

    for buildtype in builds:
        context.console.info("Processing %s version of classes:\t" % buildtype,
                             False)
        if buildtype == "source":
            targetdir = approot + "/code"
            filetool.directory(targetdir)
        elif buildtype == "build":
            targetdir = approot + "/code-build"
            filetool.directory(targetdir)
            optimize = context.jobconf.get(
                "compile-options/code/optimize",
                ["variables", "basecalls", "strings"])
            variantsettings = context.jobconf.get("variants", {})
            variantSets = util.computeCombinations(variantsettings)
        else:
            raise ConfigurationError("Unknown provider compile type '%s'" %
                                     buildtype)

        numClasses = len(script.classesObj)
        for num, clazz in enumerate(script.classesObj):
            context.console.progress(num + 1, numClasses)
            # register library (for _handleResources)
            if clazz.library.namespace not in libraries:
                libraries[clazz.library.namespace] = clazz.library

            if passesOutputfilter(clazz.id, ):
                classAId = clazz.id.replace(".", "/") + ".js"
                targetpath = targetdir + "/" + classAId
                filetool.directory(os.path.dirname(targetpath))
                if buildtype == "source":
                    shutil.copy(clazz.path, targetpath)
                elif buildtype == "build":
                    compOptions = CompileOptions(
                        optimize, variantSets[0]
                    )  # only support for a single variant set!
                    code = clazz.getCode(compOptions)
                    filetool.save(targetpath, code)

    return
Example #22
    def runPrettyPrinting(self, classes, classesObj):
        "Gather all relevant config settings and pass them to the compiler"

        if not isinstance(self._job.get("pretty-print", False), types.DictType):
            return

        self._console.info("Pretty-printing code...")
        self._console.indent()
        ppsettings = ExtMap(self._job.get("pretty-print"))  # get the pretty-print config settings

        # init options
        parser  = optparse.OptionParser()
        compiler.addCommandLineOptions(parser)
        (options, args) = parser.parse_args([])

        # modify according to config
        setattr(options, 'prettyPrint', True)  # turn on pretty-printing
        if ppsettings.get('general/indent-string',False):
            setattr(options, 'prettypIndentString', ppsettings.get('general/indent-string'))
        if ppsettings.get('comments/trailing/keep-column',False):
            setattr(options, 'prettypCommentsTrailingKeepColumn', ppsettings.get('comments/trailing/keep-column'))
        if ppsettings.get('comments/trailing/comment-cols',False):
            setattr(options, 'prettypCommentsTrailingCommentCols', ppsettings.get('comments/trailing/comment-cols'))
        if ppsettings.get('comments/trailing/padding',False):
            setattr(options, 'prettypCommentsInlinePadding', ppsettings.get('comments/trailing/padding'))
        if ppsettings.get('blocks/align-with-curlies',False):
            setattr(options, 'prettypAlignBlockWithCurlies', ppsettings.get('blocks/align-with-curlies'))
        if ppsettings.get('blocks/open-curly/newline-before',False):
            setattr(options, 'prettypOpenCurlyNewlineBefore', ppsettings.get('blocks/open-curly/newline-before'))
        if ppsettings.get('blocks/open-curly/indent-before',False):
            setattr(options, 'prettypOpenCurlyIndentBefore', ppsettings.get('blocks/open-curly/indent-before'))

        self._console.info("Pretty-printing files: ", False)
        numClasses = len(classes)
        for pos, classId in enumerate(classes):
            self._console.progress(pos+1, numClasses)
            #tree = treeLoader.getTree(classId)
            tree = classesObj[classId].tree()
            compiled = compiler.compile(tree, options)
            filetool.save(self._classes[classId]['path'], compiled)

        self._console.outdent()

        return
Example #23
    def runPrettyPrinting(self, classesObj):
        if not isinstance(self._job.get("pretty-print", False), types.DictType):
            return

        self._console.info("Pretty-printing code...")
        self._console.indent()
        ppsettings = ExtMap(self._job.get("pretty-print"))  # get the pretty-print config settings

        # init options
        def options(): pass
        pretty.defaultOptions(options)

        # modify according to config
        if 'general/indent-string' in ppsettings:
            options.prettypIndentString = ppsettings.get('general/indent-string')
        if 'comments/block/add' in ppsettings:
            options.prettypCommentsBlockAdd = ppsettings.get('comments/block/add')
        if 'comments/trailing/keep-column' in ppsettings:
            options.prettypCommentsTrailingKeepColumn = ppsettings.get('comments/trailing/keep-column')
        if 'comments/trailing/comment-cols' in ppsettings:
            options.prettypCommentsTrailingCommentCols = ppsettings.get('comments/trailing/comment-cols')
        if 'comments/trailing/padding' in ppsettings:
            options.prettypCommentsInlinePadding = ppsettings.get('comments/trailing/padding')
        if 'code/align-with-curlies' in ppsettings:
            options.prettypAlignBlockWithCurlies = ppsettings.get('code/align-with-curlies')
        if 'code/open-curly/newline-before' in ppsettings:
            options.prettypOpenCurlyNewlineBefore = ppsettings.get('code/open-curly/newline-before')
        if 'code/open-curly/indent-before' in ppsettings:
            options.prettypOpenCurlyIndentBefore = ppsettings.get('code/open-curly/indent-before')

        self._console.info("Pretty-printing files: ", False)
        numClasses = len(classesObj)
        for pos, classId in enumerate(classesObj):
            self._console.progress(pos+1, numClasses)
            tree = classesObj[classId].tree()
            result = [u'']
            result = pretty.prettyNode(tree, options, result)
            compiled = u''.join(result)
            filetool.save(self._classes[classId].path, compiled)

        self._console.outdent()

        return
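
The def options(): pass line is a lightweight trick: the function object serves as a bare attribute container for pretty.defaultOptions and the assignments that follow, since Python functions accept arbitrary attributes:

def options(): pass                  # empty function used as an options bag
options.prettyPrint = True           # attributes can be attached freely
options.prettypIndentString = "    "
print(options.prettyPrint)           # True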
Example #24
def runLogResources(jobconf, script):
    if not isinstance(jobconf.get("log/resources", False), types.DictType):
        return
    console = Context.console
    packages = script.packagesSorted()

    console.info("Dumping resource info...")
    console.indent()

    allresources = {}
    # get resource info
    CodeGenerator.packagesResourceInfo(script) # populate package.data.resources
    for packageId, package in enumerate(packages):
        allresources.update(package.data.resources)

    file_ = jobconf.get("log/resources/file", "resources.json")
    filetool.save(file_, json.dumpsCode(allresources))
    console.outdent()

    return
Example #25
def runLogResources(jobconf, script):
    if not isinstance(jobconf.get("log/resources", False), types.DictType):
        return
    console = Context.console
    packages = script.packagesSorted()

    console.info("Dumping resource info...")
    console.indent()

    allresources = {}
    # get resource info
    CodeGenerator.packagesResourceInfo(
        script)  # populate package.data.resources
    for packageId, package in enumerate(packages):
        allresources.update(package.data.resources)

    file_ = jobconf.get("log/resources/file", "resources.json")
    filetool.save(file_, json.dumpsCode(allresources))
    console.outdent()

    return
Example #26
    def depsToJsonFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]["load"] = []
                data[classId]["run"] = []

            data[classId][loadOrRun].append(depId)

        file = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent     = 2
            separators = (', ', ': ')
        else:
            indent     = None
            separators = (',', ':')
        filetool.save(file, json.dumps(data, sort_keys=True, indent=indent, separators=separators))

        return
Example #27
def _handleCode(script, generator):

    approot = context.jobconf.get("provider/app-root", "./provider")
    builds  = context.jobconf.get("provider/compile",  ["source"])

    for buildtype in builds:
        context.console.info("Processing %s version of classes:\t" % buildtype, False)
        if buildtype == "source":
            targetdir = approot + "/code"
            filetool.directory(targetdir)
        elif buildtype == "build":
            targetdir = approot + "/code-build"
            filetool.directory(targetdir)
            optimize = context.jobconf.get("compile-options/code/optimize", ["variables","basecalls","strings"])
            variantsettings = context.jobconf.get("variants", {})
            variantSets = util.computeCombinations(variantsettings)
        else:
            raise ConfigurationError("Unknown provider compile type '%s'" % buildtype)

        numClasses = len(script.classesObj)
        for num, clazz in enumerate(script.classesObj):
            context.console.progress(num+1, numClasses)
            # register library (for _handleResources)
            if clazz.library.namespace not in libraries:
                libraries[clazz.library.namespace] = clazz.library

            if passesOutputfilter(clazz.id, ):
                classAId   = clazz.id.replace(".","/") + ".js"
                targetpath = targetdir + "/" + classAId
                filetool.directory(os.path.dirname(targetpath))
                if buildtype == "source":
                    shutil.copy(clazz.path, targetpath)
                elif buildtype == "build":
                    compOptions = CompileOptions(optimize, variantSets[0]) # only support for a single variant set!
                    code = clazz.getCode(compOptions)
                    filetool.save(targetpath, code)

    return
Example #28
    def write_new(config):
        conf_str = json.dumpsPretty(config.getData())
        filetool.save(configPath, conf_str)
        return
Example #29
def runImageCombining(jobconf, confObj):

    def extractFromPrefixSpec(prefixSpec):
        prefix = altprefix = ""
        if not prefixSpec or not isinstance(prefixSpec, types.ListType):
            if jobconf.get("config-warnings/combine-images", True):
                console.warn("Missing or incorrect prefix spec, might lead to incorrect resource id's.")
        elif len(prefixSpec) == 2 :  # prefixSpec = [ prefix, altprefix ]
            prefix, altprefix = prefixSpec
        elif len(prefixSpec) == 1:
            prefix            = prefixSpec[0]
            altprefix         = ""
        return prefix, altprefix

    ##
    # strip prefix - if available - from imagePath, and replace by altprefix
    def getImageId(imagePath, prefixSpec):
        prefix, altprefix = extractFromPrefixSpec(prefixSpec)
        imageId = imagePath # init
        _, imageId, _ = Path.getCommonPrefix(imagePath, prefix) # assume: imagePath = prefix "/" imageId
        if altprefix:
            imageId   = altprefix + "/" + imageId

        imageId = Path.posifyPath(imageId)
        return imageId

    ##
    # create a dict with the clipped image file path as key, and prefix elements as value
    def getClippedImagesDict(imageSpec):
        imgDict = {}
        inputStruct = imageSpec['input']
        for group in inputStruct:
            prefixSpec = group.get('prefix', [])
            prefix, altprefix = extractFromPrefixSpec(prefixSpec)
            if prefix:
                prefix = confObj.absPath(prefix)
            for filepatt in group['files']:
                num_files = 0
                for file in glob.glob(confObj.absPath(filepatt)):  # resolve file globs - TODO: can be removed in generator.action.ImageClipping
                    console.debug("adding image %s" % file)
                    imgDict[file]    = [prefix, altprefix]
                    num_files       += 1
                if num_files == 0:
                    raise ValueError("Non-existing file spec: %s" % filepatt)

        return imgDict

    # ----------------------------------------------------------------------

    if not jobconf.get("combine-images", False):
        return
    
    console = Context.console
    cache = Context.cache

    console.info("Combining images...")
    console.indent()
    imageClipper = ImageClipping(console, cache, jobconf)

    images = jobconf.get("combine-images/images", {})
    for image, imgspec in images.iteritems():
        console.info("Creating image %s" % image)
        console.indent()
        imageId= getImageId(image, imgspec.get('prefix', []))
        image  = confObj.absPath(image)  # abs output path
        config = {}

        # create a dict of clipped image objects - for later look-up
        clippedImages = getClippedImagesDict(imgspec)

        # collect list of all input files, no matter where they come from
        input = sorted(clippedImages.keys())

        # collect layout property
        if 'layout' in imgspec:
            layout = imgspec['layout'] == "horizontal"
        else:
            layout = True  # default: horizontal

        # get type of combined image (png, base64, ...)
        combtype = "base64" if image.endswith(".b64.json") else "extension"

        # create the combined image
        subconfigs = imageClipper.combine(image, input, layout, combtype)

        # for the meta information, go through the list of returned subconfigs (one per clipped image)
        for sub in subconfigs:
            x = Image()
            x.combId, x.left, x.top, x.width, x.height, x.format = (
               imageId, sub['left'], sub['top'], sub['width'], sub['height'], sub['type'])
            subId = getImageId(sub['file'], clippedImages[sub['file']])
            config[subId] = x.toMeta()

        # store meta data for this combined image
        bname = os.path.basename(image)
        ri = bname.rfind('.')
        if ri > -1:
            bname = bname[:ri]
        bname += '.meta'
        meta_fname = os.path.join(os.path.dirname(image), bname)
        console.debug("writing meta file %s" % meta_fname)
        filetool.save(meta_fname, json.dumps(config, ensure_ascii=False, sort_keys=True))
        console.outdent()

        # handle base64 type, need to write "combined image" to file
        if combtype == "base64":
            combinedMap = {}
            for sub in subconfigs:
                subMap = {}
                subId  = getImageId(sub['file'], clippedImages[sub['file']])
                subMap['width']    = sub['width']
                subMap['height']   = sub['height']
                subMap['type']     = sub['type']
                subMap['encoding'] = sub['encoding']
                subMap['data']     = sub['data']
                combinedMap[subId] = subMap
            filetool.save(image, json.dumpsCode(combinedMap))

    console.outdent()

    return
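
Path.getCommonPrefix and Path.posifyPath are project helpers; a simplified pure-Python sketch of what getImageId above does with prefix and altprefix, assuming imagePath really starts with prefix:

def get_image_id(image_path, prefix, altprefix=""):
    image_id = image_path
    if prefix and image_path.startswith(prefix):
        image_id = image_path[len(prefix):].lstrip("/")   # strip the prefix part
    if altprefix:
        image_id = altprefix + "/" + image_id             # re-root under altprefix
    return image_id.replace("\\", "/")                    # posify separators

print(get_image_id("myproj/source/resource/icons/go.png", "myproj/source/resource", "myapp"))
# myapp/icons/go.png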
Example #30
    def runCompiled(self, script, treeCompiler, version="build"):

        def getOutputFile(compileType):
            filePath = compConf.get("paths/file")
            if not filePath:
                filePath = os.path.join(compileType, "script", script.namespace + ".js")
            return filePath

        def getFileUri(scriptUri):
            appfile = os.path.basename(fileRelPath)
            fileUri = os.path.join(scriptUri, appfile)  # make complete with file name
            fileUri = Path.posifyPath(fileUri)
            return fileUri

        ##
        # returns the Javascript code for the initial ("boot") script as a string,
        #  using the loader.tmpl template and filling its placeholders
        def generateBootCode(parts, packages, boot, script, compConf, variants, settings, bootCode, globalCodes, version="source", decodeUrisFile=None, format=False):

            ##
            # create a map with part names as key and array of package id's and
            # return as string
            def partsMap(script):
                partData = {}
                packages = script.packagesSortedSimple()
                #print "packages: %r" % packages
                for part in script.parts:
                    partData[part] = script.parts[part].packagesAsIndices(packages)
                    #print "part '%s': %r" % (part, script.parts[part].packages)
                partData = json.dumpsCode(partData)

                return partData

            def fillTemplate(vals, template):
                # Fill the code template with various vals 
                templ  = MyTemplate(template)
                result = templ.safe_substitute(vals)

                return result

            def packageUrisToJS1(packages, version, namespace=None):
                # Translate URI data to JavaScript
                
                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    for fileId in package:

                        if version == "build":
                            # TODO: gosh, the next is an ugly hack!
                            #namespace  = self._resourceHandler._genobj._namespaces[0]  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            if not namespace:
                                namespace  = script.namespace  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            relpath    = OsPath(fileId)
                        else:
                            namespace  = self._classes[fileId]["namespace"]
                            relpath    = OsPath(self._classes[fileId]["relpath"])

                        shortUri = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris

            ##
            # Translate URI data to JavaScript
            # using Package objects
            def packageUrisToJS(packages, version):

                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    if package.file: # build
                        namespace = "__out__"
                        fileId    = package.file
                        relpath    = OsPath(fileId)
                        shortUri   = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    else: # "source" :
                        for clazz in package.classes:
                            namespace  = self._classes[clazz]["namespace"]
                            relpath    = OsPath(self._classes[clazz]["relpath"])
                            shortUri   = Uri(relpath.toUri())
                            packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris


            def loadTemplate(bootCode):
                # try custom loader templates
                loaderFile = compConf.get("paths/loader-template", None)
                if not loaderFile:
                    # use default templates
                    if version=="build":
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-build.tmpl.js")
                        # TODO: test-wise using generic template
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                    else:
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-source.tmpl.js")
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                
                template = filetool.read(loaderFile)

                return template

            # ---------------------------------------------------------------

            if not parts:
                return ""

            result           = ""
            vals             = {}
            packages         = script.packagesSortedSimple()
            loader_with_boot = self._job.get("packages/loader-with-boot", True)

            # stringify data in globalCodes
            for entry in globalCodes:
                globalCodes[entry] = json.dumpsCode(globalCodes[entry])
                # undo damage done by simplejson to raw strings with escapes \\ -> \
                globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets triple escaped, therefore the first .replace()

            vals.update(globalCodes)

            if version=="build":
                vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!
            vals["Boot"] = '"%s"' % boot
            if version == "build":
                vals["BootPart"] = bootCode
            else:
                vals["BootPart"] = ""
                # fake package data
                for key, package in enumerate(packages): 
                    vals["BootPart"] += "qx.$$packageData['%d']={};\n" % key

            # Translate part information to JavaScript
            vals["Parts"] = partsMap(script)

            # Translate URI data to JavaScript
            #vals["Uris"] = packageUrisToJS1(packages, version)
            vals["Uris"] = packageUrisToJS(packages, version)
            vals["Uris"] = json.dumpsCode(vals["Uris"])

            # Add potential extra scripts
            vals["UrisBefore"] = []
            if self._job.get("add-script", False):
                additional_scripts = self._job.get("add-script",[])
                for additional_script in additional_scripts:
                    vals["UrisBefore"].append(additional_script["uri"])
            vals["UrisBefore"] = json.dumpsCode(vals["UrisBefore"])

            # Whether boot package is inline
            if version == "source":
                vals["BootIsInline"] = json.dumpsCode(False)
            else:
                vals["BootIsInline"] = json.dumpsCode(loader_with_boot)
                
            # Closure package information
            cParts = {}
            if version == "build":
                for part in script.parts:
                    if not loader_with_boot or part != "boot":
                        cParts[part] = True
            vals["ClosureParts"] = json.dumpsCode(cParts)

            # Package Hashes
            vals["PackageHashes"] = {}
            for key, package in enumerate(packages):
                if package.hash:
                    vals["PackageHashes"][key] = package.hash
                else:
                    vals["PackageHashes"][key] = "%d" % key  # fake code package hashes in source ver.
            vals["PackageHashes"] = json.dumpsCode(vals["PackageHashes"])

            # Script hook for qx.$$loader.decodeUris() function
            vals["DecodeUrisPlug"] = ""
            if decodeUrisFile:
                plugCode = filetool.read(self._config.absPath(decodeUrisFile))  # let it bomb if file can't be read
                vals["DecodeUrisPlug"] = plugCode.strip()
            
            # Enable "?nocache=...." for script loading?
            vals["NoCacheParam"] = "true" if self._job.get("compile-options/uris/add-nocache-param", True) else "false"

            # Add build details
            vals["Build"] = int(time.time()*1000) 
            vals["Type"] = version
            
            # Locate and load loader basic script
            template = loadTemplate(bootCode)

            # Fill template gives result
            result = fillTemplate(vals, template)

            return result


        ##
        # shallow layer above generateBootCode(), and its only client
        def generateBootScript(globalCodes, script, bootPackage="", compileType="build"):

            self._console.info("Generating boot script...")

            if not self._job.get("packages/i18n-with-boot", True):
                # remove I18N info from globalCodes, so they don't go into the loader
                globalCodes["Translations"] = {}
                globalCodes["Locales"]      = {}
            else:
                if compileType == "build":
                    # also remove them here, as this info is now with the packages
                    globalCodes["Translations"] = {}
                    globalCodes["Locales"]      = {}

            plugCodeFile = compConf.get("code/decode-uris-plug", False)
            if compileType == "build":
                filepackages = [(x.file,) for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants, settings, bootPackage, globalCodes, compileType, plugCodeFile, format)
            else:
                filepackages = [x.classes for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants={}, settings={}, bootCode=None, globalCodes=globalCodes, version=compileType, decodeUrisFile=plugCodeFile, format=format)


            return bootContent


        def getPackageData(package):
            data = {}
            data["resources"]    = package.data.resources
            data["translations"] = package.data.translations
            data["locales"]      = package.data.locales
            data = json.dumpsCode(data)
            data += ';\n'
            return data


        def compilePackage(packageIndex, package):
            self._console.info("Compiling package #%s:" % packageIndex, False)
            self._console.indent()

            # Compile file content
            pkgCode = self._treeCompiler.compileClasses(package.classes, variants, optimize, format)
            pkgData = getPackageData(package)
            hash    = sha.getHash(pkgData + pkgCode)[:12]  # first 12 chars should be enough

            isBootPackage = packageIndex == 0
            if isBootPackage:
                compiledContent = ("qx.$$packageData['%s']=" % hash) + pkgData + pkgCode
            else:
                compiledContent  = u'''qx.$$packageData['%s']=%s\n''' % (hash, pkgData)
                compiledContent += u'''qx.Part.$$notifyLoad("%s", function() {\n%s\n});''' % (hash, pkgCode)
            
            package.hash = hash  # to fill qx.$$loader.packageHashes in generateBootScript()

            self._console.debug("Done: %s" % self._computeContentSize(compiledContent))
            self._console.outdent()

            return compiledContent


        ##
        # Takes an array of (po-data, locale-data) dict pairs and merges
        # all po data and all cldr data into a single dict each.
        def mergeTranslationMaps(transMaps):
            poData = {}
            cldrData = {}

            for pac_dat, loc_dat in transMaps:
                for loc in pac_dat:
                    if loc not in poData:
                        poData[loc] = {}
                    poData[loc].update(pac_dat[loc])
                for loc in loc_dat:
                    if loc not in cldrData:
                        cldrData[loc] = {}
                    cldrData[loc].update(loc_dat[loc])

            return (poData, cldrData)


        # -- Main - runCompiled ------------------------------------------------

        # Early return
        compileType = self._job.get("compile/type", "")
        if compileType not in ("build", "source"):
            return

        packages   = script.packagesSortedSimple()
        parts      = script.parts
        boot       = script.boot
        variants   = script.variants
        libraries  = script.libraries

        self._treeCompiler = treeCompiler
        self._variants     = variants
        self._script       = script

        self._console.info("Generate %s version..." % compileType)
        self._console.indent()

        # - Evaluate job config ---------------------
        # Compile config
        compConf = self._job.get("compile-options")
        compConf = ExtMap(compConf)

        # Whether the code should be formatted
        format = compConf.get("code/format", False)
        script.scriptCompress = compConf.get("paths/gzip", False)

        # Read in settings
        settings = self.getSettings()
        script.settings = settings

        # Read libraries
        libs = self._job.get("library", [])

        # Get translation maps
        locales = compConf.get("code/locales", [])
        translationMaps = self.getTranslationMaps(packages, variants, locales)

        # Read in base file name
        fileRelPath = getOutputFile(compileType)
        filePath    = self._config.absPath(fileRelPath)
        script.baseScriptPath = filePath

        if compileType == "build":
            # read in uri prefixes
            scriptUri = compConf.get('uris/script', 'script')
            scriptUri = Path.posifyPath(scriptUri)
            fileUri   = getFileUri(scriptUri)
            # for resource list
            resourceUri = compConf.get('uris/resource', 'resource')
            resourceUri = Path.posifyPath(resourceUri)
        else:
            # source version needs place where the app HTML ("index.html") lives
            self.approot = self._config.absPath(compConf.get("paths/app-root", ""))
            resourceUri = None
            scriptUri   = None

        # Get global script data (like qxlibraries, qxresources,...)
        globalCodes                = {}
        globalCodes["Settings"]    = settings
        globalCodes["Variants"]    = self.generateVariantsCode(variants)
        globalCodes["Libinfo"]     = self.generateLibInfoCode(libs, format, resourceUri, scriptUri)
        # add synthetic output lib
        if scriptUri:
            out_sourceUri = scriptUri
        else:
            out_sourceUri = self._computeResourceUri({'class': ".", 'path': os.path.dirname(script.baseScriptPath)}, OsPath(""), rType="class", appRoot=self.approot)
            out_sourceUri = os.path.normpath(out_sourceUri.encodedValue())
        globalCodes["Libinfo"]['__out__'] = { 'sourceUri': out_sourceUri }
        globalCodes["Resources"]    = self.generateResourceInfoCode(script, settings, libraries, format)
        globalCodes["Translations"],\
        globalCodes["Locales"]      = mergeTranslationMaps(translationMaps)

        # Potentially create dedicated I18N packages
        i18n_as_parts = not self._job.get("packages/i18n-with-boot", True)
        if i18n_as_parts:
            script = self.generateI18NParts(script, globalCodes)
            self.writePackages([p for p in script.packages if getattr(p, "__localeflag", False)], script)

        if compileType == "build":

            # - Specific job config ---------------------
            # read in compiler options
            optimize = compConf.get("code/optimize", [])
            self._treeCompiler.setOptimize(optimize)

            # - Generating packages ---------------------
            self._console.info("Generating packages...")
            self._console.indent()

            bootPackage = ""
            for packageIndex, package in enumerate(packages):
                package.compiled = compilePackage(packageIndex, package)

            self._console.outdent()
            if not len(packages):
                raise RuntimeError("No valid boot package generated.")

            # - Put loader and packages together -------
            loader_with_boot = self._job.get("packages/loader-with-boot", True)
            # handle loader and boot package
            if not loader_with_boot:
                loadPackage = Package(0)            # make a dummy Package for the loader
                packages.insert(0, loadPackage)

            # attach file names (do this before calling generateBootScript)
            for package, fileName in zip(packages, self.packagesFileNames(script.baseScriptPath, len(packages))):
                package.file = os.path.basename(fileName)
                if self._job.get("compile-options/paths/scripts-add-hash", False):
                    package.file = self._fileNameWithHash(package.file, package.hash)

            # generate and integrate boot code
            if loader_with_boot:
                # merge loader code with first package
                bootCode = generateBootScript(globalCodes, script, packages[0].compiled)
                packages[0].compiled = bootCode
            else:
                loaderCode = generateBootScript(globalCodes, script)
                packages[0].compiled = loaderCode

            # write packages
            self.writePackages(packages, script)

        # ---- 'source' version ------------------------------------------------
        else:

            sourceContent = generateBootScript(globalCodes, script, bootPackage="", compileType=compileType)

            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings)

            # Save result file
            filetool.save(resolvedFilePath, sourceContent)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, sourceContent)

            self._console.outdent()
            self._console.debug("Done: %s" % self._computeContentSize(sourceContent))
            self._console.outdent()

        self._console.outdent()

        return  # runCompiled()
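The fragment above ends by running fillTemplate(), which, judging from the safe_substitute() call, behaves like Python's string.Template. A minimal, self-contained sketch of that technique (the template text and keys below are invented for illustration and are not the actual loader.tmpl.js):

from string import Template

# Illustrative stand-in for a loader template; the real loader.tmpl.js
# is far larger. ${...} placeholders match string.Template syntax.
loader_tmpl = "var loader = {boot: ${Boot}, parts: ${Parts}, uris: ${Uris}};"

vals = {
    "Boot":  '"boot"',             # values are pre-serialized JS snippets,
    "Parts": '{"boot": [0]}',      # as json.dumpsCode() produces above
    "Uris":  '[["__out__:app.js"]]',
}

# safe_substitute leaves unknown ${...} placeholders untouched instead of
# raising KeyError, so a partially filled template still renders.
print(Template(loader_tmpl).safe_substitute(vals))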
Beispiel #31
0
def writeDotFile(gr1, depsLogConf):
    file = depsLogConf.get('dot/file', "deps.dot")
    dot = gr1.write(fmt='dotwt')
    console.info("Writing dependency graph to file: %s" % file)
    filetool.save(file, dot)
    return
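The resulting deps.dot file is plain Graphviz input; assuming Graphviz is installed, it can be rendered with, for example, "dot -Tpng deps.dot -o deps.png".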
Beispiel #32
0
def runFix(jobconf, classesObj):
    def fixPng():
        return

    def removeBOM(fpath):
        content = open(fpath, "rb").read()
        if content.startswith(codecs.BOM_UTF8):
            console.debug("removing BOM: %s" % filePath)
            open(fpath, "wb").write(content[len(codecs.BOM_UTF8):])
        return

    # - Main ---------------------------------------------------------------

    if not isinstance(jobconf.get("fix-files", False), types.DictType):
        return

    console = Context.console
    classes = classesObj.keys()
    fixsettings = ExtMap(jobconf.get("fix-files"))

    # Fixing JS source files
    console.info("Fixing whitespace in source files...")
    console.indent()

    console.info("Fixing files: ", False)
    numClasses = len(classes)
    eolStyle = fixsettings.get("eol-style", "LF")
    tabWidth = fixsettings.get("tab-width", 2)
    for pos, classId in enumerate(classes):
        console.progress(pos + 1, numClasses)
        classEntry = classesObj[classId]
        filePath = classEntry.path
        fileEncoding = classEntry.encoding
        fileContent = filetool.read(filePath, fileEncoding)
        # Caveat: as filetool.read already calls any2Unix, converting to LF will
        # not work as the file content appears unchanged to this function
        if eolStyle == "CR":
            fixedContent = textutil.any2Mac(fileContent)
        elif eolStyle == "CRLF":
            fixedContent = textutil.any2Dos(fileContent)
        else:
            fixedContent = textutil.any2Unix(fileContent)
        fixedContent = textutil.normalizeWhiteSpace(
            textutil.removeTrailingSpaces(
                textutil.tab2Space(fixedContent, tabWidth)))
        if fixedContent != fileContent:
            console.debug("modifying file: %s" % filePath)
            filetool.save(filePath, fixedContent, fileEncoding)
        # this has to go separate, as it requires binary operation
        removeBOM(filePath)

    console.outdent()

    # Fixing PNG files -- currently just a stub!
    if fixsettings.get("fix-png", False):
        console.info("Fixing PNGs...")
        console.indent()
        fixPng()
        console.outdent()

    return
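removeBOM() above must operate on raw bytes because the BOM check is byte-exact. A standalone sketch of the same technique using context managers (the function name is mine):

import codecs

def strip_utf8_bom(path):
    # Read as bytes: codecs.BOM_UTF8 is the three-byte sequence EF BB BF.
    with open(path, "rb") as f:
        content = f.read()
    if content.startswith(codecs.BOM_UTF8):
        with open(path, "wb") as f:
            f.write(content[len(codecs.BOM_UTF8):])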
Beispiel #33
0
    def depsToProviderFormat(classDepsIter, depsLogConf):
        ##
        # duplicates CodeProvider.passesOutputFilter
        def passesOutputFilter(resId):
            # must match some include expressions
            if not filter(None, [x.search(resId) for x in inclregexps]):  # [None, None, _sre.match, None, _sre.match, ...]
                return False
            # must not match any exclude expressions
            if filter(None, [x.search(resId) for x in exclregexps]):
                return False
            return True

        # ---------------------------------------

        inclregexps = jobconf.get("provider/include", ["*"])
        exclregexps = jobconf.get("provider/exclude", [])
        inclregexps = map(textutil.toRegExp, inclregexps)
        exclregexps = map(textutil.toRegExp, exclregexps)
        replace_dots = depsLogConf.get("json/replace-dots-in", [])
        slashes_keys = 'keys' in replace_dots
        slashes_vals = 'values' in replace_dots

        classToDeps = {}
        # Class deps
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if passesOutputFilter(classId):
                if classId not in classToDeps:
                    classToDeps[classId] = {}
                    classToDeps[classId]["load"] = []
                    classToDeps[classId]["run"] = []
                if depId is not None:
                    classToDeps[classId][loadOrRun].append(depId)

        if slashes_vals:
            # transform dep items
            for key, val in classToDeps.items():
                newval = []
                for ldep in val["load"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["load"] = newval
                newval = []
                for ldep in val["run"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["run"] = newval

        # Resource deps
        # class list
        classObjs = [x for x in script.classesObj if x.id in classToDeps.keys()]
        # map resources to class.resources
        classObjs = Class.mapResourcesToClasses(script.libraries, classObjs, jobconf.get("asset-let", {}))

        for clazz in classObjs:
            reskeys = ["/resource/resources#"+x.id for x in clazz.resources]
            classToDeps[clazz.id]["run"].extend(reskeys)

        # Message key deps
        for classId in classToDeps:
            #classKeys, _ = Locale.getTranslation(classId, {})
            classKeys, _ = script.classesAll[classId].messageStrings({})
            transIds  = set(x['id'] for x in classKeys) # get the msgid's, uniquely
            transIds.update(x['plural'] for x in classKeys if 'plural' in x) # add plural keys
            transKeys = ["/translation/i18n-${lang}#" + x for x in transIds]
            classToDeps[classId]["run"].extend(transKeys)

        # CLDR dependency
        for classId in classToDeps:
            if script.classesAll[classId].getHints("cldr"):
                classToDeps[classId]["run"].append("/locale/locale-${lang}#cldr")

        if slashes_keys:
            # transform dep keys ("qx.Class" -> "qx/Class.js")
            for key, val in classToDeps.items():
                if key.find(".")>-1:
                    newkey = key.replace(".", "/")
                    classToDeps[newkey] = classToDeps[key]
                    del classToDeps[key]

        # sort information for each class (for stable output)
        for classvals in classToDeps.values():
            for key in classvals:
                classvals[key] = sorted(classvals[key], reverse=True)

        # write to file
        file_ = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file_)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent     = 2
            separators = (', ', ': ')
        else:
            indent     = None
            separators = (',', ':')
        filetool.save(file_, json.dumps(classToDeps, sort_keys=True, indent=indent, separators=separators))

        return
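passesOutputFilter() implements the classic include/exclude pattern: at least one include expression must match, and no exclude expression may match. A self-contained sketch using any() in place of the Python-2 filter(None, ...) idiom (make_filter and the sample patterns are illustrative, not taken from the generator):

import re

def make_filter(includes, excludes):
    # Compile once up front; the generator derives these regexes from
    # shell-like globs via textutil.toRegExp, plain regexes are assumed here.
    inc = [re.compile(p) for p in includes]
    exc = [re.compile(p) for p in excludes]

    def passes(res_id):
        if not any(r.search(res_id) for r in inc):
            return False  # must match at least one include expression
        if any(r.search(res_id) for r in exc):
            return False  # must not match any exclude expression
        return True

    return passes

passes = make_filter([r"^qx\."], [r"^qx\.test\."])
print(passes("qx.ui.core.Widget"))  # True
print(passes("qx.test.Bootstrap"))  # False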
Beispiel #34
0
    def storeApi(self, include, apiPath, variantSet, verify):
        self._console.info("Generating API data...")
        self._console.indent()

        docTree = tree.Node("doctree")
        length = len(include)

        self._console.info("Loading class docs...", False)
        self._console.indent()

        packages = []
        hasErrors = False
        for pos, fileId in enumerate(include):
            self._console.progress(pos+1, length)
            fileApi = self.getApi(fileId, variantSet)
            if fileApi is None:
                hasErrors = True
            
            # Only continue merging if there were no errors
            if not hasErrors:
                self._mergeApiNodes(docTree, fileApi)
                pkgId = self._classesObj[fileId].package
                # make sure all parent packages are included
                nsparts = pkgId.split('.')
                for i in range(len(nsparts)+1):
                    parentPkg = ".".join(nsparts[0:i])
                    if not parentPkg in packages:
                        packages.append(parentPkg)

        self._console.outdent()

        if hasErrors:
            self._console.error("Found erroneous API information. Please see above. Stopping!")
            return
                
        self._console.info("Loading package docs...")
        self._console.indent()
        
        packages.sort()
        for pkgId in packages:
            self._mergeApiNodes(docTree, self.getPackageApi(pkgId))

        self._console.outdent()

        self._console.info("Connecting classes...")
        api.connectPackage(docTree, docTree)

        self._console.info("Generating search index...")
        index = self.docTreeToSearchIndex(docTree, "", "", "")
        
        if verify and "links" in verify:
            self.verifyLinks(docTree, index)
        
        self._console.info("Saving data...", False)
        self._console.indent()

        packageData = api.getPackageData(docTree)
        packageJson = json.dumps(packageData)
        filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)
        
        length = 0
        for classData in api.classNodeIterator(docTree):
            length += 1
            
        pos = 0
        for classData in api.classNodeIterator(docTree):
            pos += 1
            self._console.progress(pos, length)
            nodeData = tree.getNodeData(classData)
            nodeJson = json.dumps(nodeData)
            fileName = os.path.join(apiPath, classData.get("fullName") + ".json")
            filetool.save(fileName, nodeJson)
            
        self._console.outdent()
            
        self._console.info("Saving index...")
        indexContent = json.dumps(index, separators=(',',':'), sort_keys=True) # compact encoding
        filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)            

        self._console.outdent()
        self._console.info("Done")
Beispiel #35
0
    def writePackage(self, content, filePath, script):
        console.debug("Writing script file %s" % filePath)
        if script.scriptCompress:
            filetool.gzip(filePath, content)
        else:
            filetool.save(filePath, content)
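filetool.gzip() itself is not shown on this page; a plausible minimal equivalent built on the standard gzip module could look like the following (the .gz suffix and the encoding parameter are assumptions):

import gzip

def gzip_save(file_path, content, encoding="utf-8"):
    # Write the compressed sibling of the script file, e.g. app.js -> app.js.gz
    with gzip.open(file_path + ".gz", "wb") as f:
        f.write(content.encode(encoding))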
Beispiel #36
0
    def storeApi(self, include, apiPath):
        self._console.info("Generating API data...")
        self._console.indent()

        docTree = tree.Node("doctree")
        length = len(include)

        self._console.info("Loading class docs...", False)
        self._console.indent()

        packages = []
        hasErrors = False
        for pos, fileId in enumerate(include):
            self._console.progress(pos + 1, length)
            fileApi = self.getApi(fileId)
            if fileApi is None:
                hasErrors = True

            # Only continue merging if there were no errors
            if not hasErrors:
                self._mergeApiNodes(docTree, fileApi)
                pkgId = self._classesObj[fileId].package
                # make sure all parent packages are included
                nsparts = pkgId.split('.')
                for i in range(len(nsparts) + 1):
                    parentPkg = ".".join(nsparts[0:i])
                    if not parentPkg in packages:
                        packages.append(parentPkg)

        self._console.outdent()

        if hasErrors:
            self._console.error(
                "Found erroneous API information. Please see above. Stopping!")
            return

        self._console.info("Loading package docs...")
        self._console.indent()

        packages.sort()
        for pkgId in packages:
            self._mergeApiNodes(docTree, self.getPackageApi(pkgId))

        self._console.outdent()

        self._console.info("Connecting classes...")
        api.connectPackage(docTree, docTree)

        self._console.info("Generating search index...")
        indexContent = self.docTreeToSearchIndex(docTree, "", "", "")

        self._console.info("Saving data...", False)
        self._console.indent()

        packageData = api.getPackageData(docTree)
        packageJson = json.dumps(packageData)
        filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

        length = 0
        for classData in api.classNodeIterator(docTree):
            length += 1

        pos = 0
        for classData in api.classNodeIterator(docTree):
            pos += 1
            self._console.progress(pos, length)
            nodeData = tree.getNodeData(classData)
            nodeJson = json.dumps(nodeData)
            fileName = os.path.join(apiPath,
                                    classData.get("fullName") + ".json")
            filetool.save(fileName, nodeJson)

        self._console.outdent()

        self._console.info("Saving index...")
        filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)

        self._console.outdent()
        self._console.info("Done")
Beispiel #37
0
    def depsToProviderFormat(classDepsIter, depsLogConf):
        ##
        # duplicates CodeProvider.passesOutputFilter
        def passesOutputFilter(resId):
            # must match some include expressions
            if not filter(None, [
                    x.search(resId) for x in inclregexps
            ]):  # [None, None, _sre.match, None, _sre.match, ...]
                return False
            # must not match any exclude expressions
            if filter(None, [x.search(resId) for x in exclregexps]):
                return False
            return True

        # ---------------------------------------

        inclregexps = jobconf.get("provider/include", ["*"])
        exclregexps = jobconf.get("provider/exclude", [])
        inclregexps = map(textutil.toRegExp, inclregexps)
        exclregexps = map(textutil.toRegExp, exclregexps)
        replace_dots = depsLogConf.get("json/replace-dots-in", [])
        slashes_keys = 'keys' in replace_dots
        slashes_vals = 'values' in replace_dots

        classToDeps = {}
        # Class deps
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if passesOutputFilter(classId):
                if classId not in classToDeps:
                    classToDeps[classId] = {}
                    classToDeps[classId]["load"] = []
                    classToDeps[classId]["run"] = []
                if depId is not None:
                    classToDeps[classId][loadOrRun].append(depId)

        if slashes_vals:
            # transform dep items
            for key, val in classToDeps.items():
                newval = []
                for ldep in val["load"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["load"] = newval
                newval = []
                for ldep in val["run"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["run"] = newval

        # Resource deps
        # class list
        classObjs = [
            x for x in script.classesObj if x.id in classToDeps.keys()
        ]
        # map resources to class.resources
        classObjs = Class.mapResourcesToClasses(script.libraries, classObjs,
                                                jobconf.get("asset-let", {}))

        for clazz in classObjs:
            reskeys = ["/resource/resources#" + x.id for x in clazz.resources]
            classToDeps[clazz.id]["run"].extend(reskeys)

        # Message key deps
        for classId in classToDeps:
            #classKeys, _ = Locale.getTranslation(classId, {})
            classKeys, _ = script.classesAll[classId].messageStrings({})
            transIds = set(x['id']
                           for x in classKeys)  # get the msgid's, uniquely
            transIds.update(x['plural'] for x in classKeys
                            if 'plural' in x)  # add plural keys
            transKeys = ["/translation/i18n-${lang}#" + x for x in transIds]
            classToDeps[classId]["run"].extend(transKeys)

        # CLDR dependency
        for classId in classToDeps:
            if script.classesAll[classId].getHints("cldr"):
                classToDeps[classId]["run"].append(
                    "/locale/locale-${lang}#cldr")

        if slashes_keys:
            # transform dep keys ("qx.Class" -> "qx/Class.js")
            for key, val in classToDeps.items():
                if key.find(".") > -1:
                    newkey = key.replace(".", "/")
                    classToDeps[newkey] = classToDeps[key]
                    del classToDeps[key]

        # sort information for each class (for stable output)
        for classvals in classToDeps.values():
            for key in classvals:
                classvals[key] = sorted(classvals[key], reverse=True)

        # write to file
        file_ = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file_)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(
            file_,
            json.dumps(classToDeps,
                       sort_keys=True,
                       indent=indent,
                       separators=separators))

        return
Beispiel #38
0
    def storeApi(self, include, apiPath):
        self._console.info("Generating API data...")
        self._console.indent()

        docTree = tree.Node("doctree")
        length = len(include)

        self._console.info("Loading class docs...", False)
        self._console.indent()

        packages = []
        hasErrors = False
        for pos, fileId in enumerate(include):
            self._console.progress(pos + 1, length)
            fileApi = self.getApi(fileId)
            if fileApi is None:
                hasErrors = True

            # Only continue merging if there were no errors
            if not hasErrors:
                self._mergeApiNodes(docTree, fileApi)
                pkgId = self._classes[fileId]["package"]
                # make sure all parent packages are included
                nsparts = pkgId.split(".")
                for i in range(len(nsparts) + 1):
                    parentPkg = ".".join(nsparts[0:i])
                    if not parentPkg in packages:
                        packages.append(parentPkg)

        self._console.outdent()

        if hasErrors:
            self._console.error("Found errornous API information. Please see above. Stopping!")
            return

        self._console.info("Loading package docs...")
        self._console.indent()

        packages.sort()
        for pkgId in packages:
            self._mergeApiNodes(docTree, self.getPackageApi(pkgId))

        self._console.outdent()

        self._console.info("Connecting classes...")
        api.connectPackage(docTree, docTree)

        self._console.info("Generating search index...")
        indexContent = tree.nodeToIndexString(docTree, "", "", "")

        self._console.info("Saving data...", False)
        self._console.indent()

        packages = api.packagesToJsonString(docTree, "", "", "")
        filetool.save(os.path.join(apiPath, "apidata.js"), packages)

        length = 0
        for classData in api.classNodeIterator(docTree):
            length += 1

        pos = 0
        for classData in api.classNodeIterator(docTree):
            pos += 1
            self._console.progress(pos, length)
            classContent = tree.nodeToJsonString(classData, "", "", "")
            fileName = os.path.join(apiPath, classData.get("fullName") + ".js")
            filetool.save(fileName, classContent)

        self._console.outdent()

        self._console.info("Saving index...")
        filetool.save(os.path.join(apiPath, "apiindex.js"), indexContent)

        self._console.outdent()
        self._console.info("Done")
Beispiel #39
0
    def storeApi(self, include, apiPath, variantSet, jobConf):
        self._console.info("Generating API data...")
        self._console.indent()

        docTree = tree.Node("doctree")
        docTree.set("fullName", "")
        docTree.set("name", "")
        docTree.set("packageName", "")
        length = len(include)

        self._console.info("Loading class docs...", False)
        self._console.indent()

        packages = []
        AttachMap = {}
        hasErrors = False
        for pos, fileId in enumerate(include):
            self._console.progress(pos+1, length)
            fileApi, attachMap = self.getApi(fileId, variantSet)
            if fileApi is None:
                hasErrors = True

            # Only continue merging if there were no errors
            if not hasErrors:
                # update AttachMap
                for cls in attachMap: # 'qx.Class', 'qx.core.Object', 'q', ...
                    if cls not in AttachMap:
                        AttachMap[cls] = attachMap[cls]
                    else:
                        for section in attachMap[cls]:  # 'statics', 'members'
                            if section not in AttachMap[cls]:
                                AttachMap[cls][section] = attachMap[cls][section]
                            else:
                                for method in attachMap[cls][section]:  # 'define', 'showToolTip', ...
                                    if method not in AttachMap[cls][section]:
                                        AttachMap[cls][section][method] = attachMap[cls][section][method]
                                    else:
                                        self._console.warn("Multiple @attach for same target '%s::%s#%s'." % (cls, section, method))

                self._mergeApiNodes(docTree, fileApi)
                pkgId = self._classesObj[fileId].package
                # make sure all parent packages are included
                nsparts = pkgId.split('.')
                for i in range(len(nsparts)+1):
                    parentPkg = ".".join(nsparts[0:i])
                    if not parentPkg in packages:
                        packages.append(parentPkg)

        self._console.outdent()

        if hasErrors:
            self._console.error("Found erroneous API information. Please see above. Stopping!")
            return

        self._console.info("Loading package docs...")
        self._console.indent()

        packages.sort()
        for pkgId in packages:
            self._mergeApiNodes(docTree, self.getPackageApi(pkgId))

        self._console.outdent()

        self._console.info("Connecting classes...  ", feed=False)
        api.connectPackage(docTree, docTree)
        self._console.dotclear()

        self._console.info("Generating search index...")
        index = self.docTreeToSearchIndex(docTree, "", "", "")

        if "verify" in jobConf:
            if "links" in jobConf["verify"]:
                api.verifyLinks(docTree, index)
            if "types" in jobConf["verify"]:
                api.verifyTypes(docTree, index)

        if "warnings" in jobConf and "output" in jobConf["warnings"]:
            api.logErrors(docTree, jobConf["warnings"]["output"])

        if "verify" in jobConf:
            if "statistics" in jobConf["verify"]:
                api.verifyDocPercentage(docTree)

        self._console.info("Saving data...", False)
        self._console.indent()

        packageData = api.getPackageData(docTree)
        packageJson = json.dumps(packageData)
        filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

        # apply the @attach information
        for classData in api.classNodeIterator(docTree):
            className = classData.get("fullName")
            if className in AttachMap:
                self._applyAttachInfo(className, classData, AttachMap[className])

        # write per-class .json to disk
        length = 0
        for classData in api.classNodeIterator(docTree):
            length += 1

        links = []

        pos = 0
        for classData in api.classNodeIterator(docTree):
            pos += 1
            self._console.progress(pos, length)
            nodeData = tree.getNodeData(classData)
            nodeJson = json.dumps(nodeData)
            className = classData.get("fullName")
            fileName = os.path.join(apiPath, className + ".json")
            filetool.save(fileName, nodeJson)

            sitemap = False
            if "sitemap" in jobConf:
                sitemap = jobConf["sitemap"]
                if "link-uri" in sitemap:
                    links.append(sitemap["link-uri"] % className)

            #import pdb; pdb.set_trace()
            #for type in ["method", "method-static", "event", "property", "constant"]:
            #  for item in classData.getAllChildrenOfType(type):
            #      itemName = className + "~" + item.attributes["name"]
            #      link = linkPrefix + itemName

        self._console.outdent()

        # write apiindex.json
        self._console.info("Saving index...")
        indexContent = json.dumps(index, separators=(',', ':'), sort_keys=True)  # compact encoding
        filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)

        # save sitemap
        if sitemap and len(links) > 0:
            self._console.info("Saving XML sitemap...")
            sitemapData = self.getSitemap(links)
            if "file" in sitemap:
                sitemapFile = sitemap["file"]
            else:
                sitemapFile = os.path.join(apiPath, "sitemap.xml")
            filetool.save(sitemapFile, sitemapData)

        self._console.outdent()
        self._console.info("Done")
Beispiel #40
0
def CreateDemoJson():

    source = []
    build  = []
    scategories = {}
    bcategories = {}

    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = 'tool/default.json'
    assert os.path.isfile(default_json)
    JSON['include'] = [{ "path" : "%s" % default_json }]

    # per-demo template file
    json_tmpl = open(os.path.join('tool','tmpl.json'),"rU").read()

    # jobs section
    JSON['jobs'] = {}

    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html is None:  # terminate the generator part and go to finalizing json file
            break

        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)

        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path":"%s" % config_file})

        # build classname
        simple = "%s.%s" % (category,name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple,))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple,))

        # concat all
        currcont = json_tmpl.replace('XXX',"%s.%s"%(category,name)).replace("YYY",name).replace("ZZZ",category)
        templatejobs = json.loads("{" + currcont + "}")
        for job,jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval

    # Post-processing
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])

    JSON['jobs']["source"] = { "run" : sorted(source) }
    JSON['jobs']["build"]  = { "run" : sorted(build) }

    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(fJSON, cont)

    yield  # final yield to provide for .send(None) of caller
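CreateDemoJson() uses the generator-as-coroutine protocol: the caller primes the generator, .send()s one HTML path at a time, and finally calls .send(None) to trigger the post-processing after the while loop; the trailing bare yield keeps that last .send() from raising StopIteration. A minimal sketch of the same protocol:

def collector():
    items = []
    while True:
        item = (yield)
        if item is None:   # terminate the receive loop, go to finalizing
            break
        items.append(item)
    print("collected: %r" % (items,))
    yield  # final yield to provide for .send(None) of the caller

gen = collector()
next(gen)                   # prime the generator up to the first (yield)
gen.send("ui/table.html")
gen.send("data/store.html")
gen.send(None)              # finalize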
Beispiel #41
0
    def writePackage(self, content, filePath, script):
        console.debug("Writing script file %s" % filePath)
        if script.scriptCompress:
            filetool.gzip(filePath, content)
        else:
            filetool.save(filePath, content)
Beispiel #42
0
    def runCompiled(self, script, treeCompiler, version="build"):

        def getOutputFile(compileType):
            filePath = compConf.get("paths/file")
            if not filePath:
                filePath = os.path.join(compileType, "script", script.namespace + ".js")
            return filePath

        def getFileUri(scriptUri):
            appfile = os.path.basename(fileRelPath)
            fileUri = os.path.join(scriptUri, appfile)  # make complete with file name
            fileUri = Path.posifyPath(fileUri)
            return fileUri

        ##
        # returns the Javascript code for the initial ("boot") script as a string,
        #  using the loader.tmpl template and filling its placeholders
        def generateBootCode(parts, packages, boot, script, compConf, variants, settings, bootCode, globalCodes, version="source", decodeUrisFile=None, format=False):

            ##
            # create a map with part names as key and array of package id's and
            # return as string
            def partsMap(script):
                partData = {}
                packages = script.packagesSortedSimple()
                #print "packages: %r" % packages
                for part in script.parts:
                    partData[part] = script.parts[part].packagesAsIndices(packages)
                    #print "part '%s': %r" % (part, script.parts[part].packages)
                partData = json.dumpsCode(partData)

                return partData

            def fillTemplate(vals, template):
                # Fill the code template with various vals 
                templ  = MyTemplate(template)
                result = templ.safe_substitute(vals)

                return result

            def packageUrisToJS1(packages, version, namespace=None):
                # Translate URI data to JavaScript
                
                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    for fileId in package:

                        if version == "build":
                            # TODO: gosh, the next is an ugly hack!
                            #namespace  = self._resourceHandler._genobj._namespaces[0]  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            if not namespace:
                                namespace  = script.namespace  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            relpath    = OsPath(fileId)
                        else:
                            namespace  = self._classes[fileId]["namespace"]
                            relpath    = OsPath(self._classes[fileId]["relpath"])

                        shortUri = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris

            ##
            # Translate URI data to JavaScript
            # using Package objects
            def packageUrisToJS(packages, version):

                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    if package.file: # build
                        namespace = "__out__"
                        fileId    = package.file
                        relpath    = OsPath(fileId)
                        shortUri   = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    else: # "source" :
                        for clazz in package.classes:
                            namespace  = self._classes[clazz]["namespace"]
                            relpath    = OsPath(self._classes[clazz]["relpath"])
                            shortUri   = Uri(relpath.toUri())
                            packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris


            def loadTemplate(bootCode):
                # try custom loader templates
                loaderFile = compConf.get("paths/loader-template", None)
                if not loaderFile:
                    # use default templates
                    if version=="build":
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-build.tmpl.js")
                        # TODO: test-wise using generic template
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                    else:
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-source.tmpl.js")
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                
                template = filetool.read(loaderFile)

                return template

            # ---------------------------------------------------------------

            if not parts:
                return ""

            result           = ""
            vals             = {}
            packages         = script.packagesSortedSimple()
            loader_with_boot = self._job.get("packages/loader-with-boot", True)

            # stringify data in globalCodes
            for entry in globalCodes:
                globalCodes[entry] = json.dumpsCode(globalCodes[entry])
                # undo damage done by simplejson to raw strings with escapes \\ -> \
                globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets triple-escaped, hence the first .replace()

            vals.update(globalCodes)

            if version=="build":
                vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!
            vals["Boot"] = '"%s"' % boot
            if version == "build":
                vals["BootPart"] = bootCode
            else:
                vals["BootPart"] = ""
                # fake package data
                for key, package in enumerate(packages): 
                    vals["BootPart"] += "qx.$$packageData['%d']={};\n" % key

            # Translate part information to JavaScript
            vals["Parts"] = partsMap(script)

            # Translate URI data to JavaScript
            #vals["Uris"] = packageUrisToJS1(packages, version)
            vals["Uris"] = packageUrisToJS(packages, version)
            vals["Uris"] = json.dumpsCode(vals["Uris"])

            # Add potential extra scripts
            vals["UrisBefore"] = []
            if self._job.get("add-script", False):
                additional_scripts = self._job.get("add-script",[])
                for additional_script in additional_scripts:
                    vals["UrisBefore"].append(additional_script["uri"])
            vals["UrisBefore"] = json.dumpsCode(vals["UrisBefore"])

            # Whether boot package is inline
            if version == "source":
                vals["BootIsInline"] = json.dumpsCode(False)
            else:
                vals["BootIsInline"] = json.dumpsCode(loader_with_boot)
                
            # Closure package information
            cParts = {}
            if version == "build":
                for part in script.parts:
                    if not loader_with_boot or part != "boot":
                        cParts[part] = True
            vals["ClosureParts"] = json.dumpsCode(cParts)

            # Package Hashes
            vals["PackageHashes"] = {}
            for key, package in enumerate(packages):
                if package.hash:
                    vals["PackageHashes"][key] = package.hash
                else:
                    vals["PackageHashes"][key] = "%d" % key  # fake code package hashes in source ver.
            vals["PackageHashes"] = json.dumpsCode(vals["PackageHashes"])

            # Script hook for qx.$$loader.decodeUris() function
            vals["DecodeUrisPlug"] = ""
            if decodeUrisFile:
                plugCode = filetool.read(self._config.absPath(decodeUrisFile))  # let it bomb if file can't be read
                vals["DecodeUrisPlug"] = plugCode.strip()
            
            # Enable "?nocache=...." for script loading?
            vals["NoCacheParam"] = "true" if self._job.get("compile-options/uris/add-nocache-param", True) else "false"

            # Locate and load loader basic script
            template = loadTemplate(bootCode)

            # Fill the template to produce the result
            result = fillTemplate(vals, template)

            return result


        ##
        # shallow layer above generateBootCode(), and its only client
        def generateBootScript(globalCodes, script, bootPackage="", compileType="build"):

            self._console.info("Generating boot script...")

            if not self._job.get("packages/i18n-with-boot", True):
                # remove I18N info from globalCodes, so they don't go into the loader
                globalCodes["Translations"] = {}
                globalCodes["Locales"]      = {}
            else:
                if compileType == "build":
                    # also remove them here, as this info is now with the packages
                    globalCodes["Translations"] = {}
                    globalCodes["Locales"]      = {}

            plugCodeFile = compConf.get("code/decode-uris-plug", False)
            if compileType == "build":
                filepackages = [(x.file,) for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants, settings, bootPackage, globalCodes, compileType, plugCodeFile, format)
            else:
                filepackages = [x.classes for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants={}, settings={}, bootCode=None, globalCodes=globalCodes, version=compileType, decodeUrisFile=plugCodeFile, format=format)


            return bootContent


        def getPackageData(package):
            data = {}
            data["resources"]    = package.data.resources
            data["translations"] = package.data.translations
            data["locales"]      = package.data.locales
            data = json.dumpsCode(data)
            data += ';\n'
            return data


        def compilePackage(packageIndex, package):
            self._console.info("Compiling package #%s:" % packageIndex, False)
            self._console.indent()

            # Compile file content
            pkgCode = self._treeCompiler.compileClasses(package.classes, variants, optimize, format)
            pkgData = getPackageData(package)
            hash    = sha.getHash(pkgData + pkgCode)[:12]  # first 12 chars should be enough

            isBootPackage = packageIndex == 0
            if isBootPackage:
                compiledContent = ("qx.$$packageData['%s']=" % hash) + pkgData + pkgCode
            else:
                compiledContent  = u'''qx.$$packageData['%s']=%s\n''' % (hash, pkgData)
                compiledContent += u'''qx.Part.$$notifyLoad("%s", function() {\n%s\n});''' % (hash, pkgCode)
            
            package.hash = hash  # to fill qx.$$loader.packageHashes in generateBootScript()

            self._console.debug("Done: %s" % self._computeContentSize(compiledContent))
            self._console.outdent()

            return compiledContent


        ##
        # Takes an array of (po-data, locale-data) dict pairs and merges
        # all po data and all cldr data into a single dict each.
        def mergeTranslationMaps(transMaps):
            poData = {}
            cldrData = {}

            for pac_dat, loc_dat in transMaps:
                for loc in pac_dat:
                    if loc not in poData:
                        poData[loc] = {}
                    poData[loc].update(pac_dat[loc])
                for loc in loc_dat:
                    if loc not in cldrData:
                        cldrData[loc] = {}
                    cldrData[loc].update(loc_dat[loc])

            return (poData, cldrData)


        # -- Main - runCompiled ------------------------------------------------

        # Early return
        compileType = self._job.get("compile/type", "")
        if compileType not in ("build", "source"):
            return

        packages   = script.packagesSortedSimple()
        parts      = script.parts
        boot       = script.boot
        variants   = script.variants
        libraries  = script.libraries

        self._treeCompiler = treeCompiler
        self._variants     = variants
        self._script       = script

        self._console.info("Generate %s version..." % compileType)
        self._console.indent()

        # - Evaluate job config ---------------------
        # Compile config
        compConf = self._job.get("compile-options")
        compConf = ExtMap(compConf)

        # Whether the code should be formatted
        format = compConf.get("code/format", False)
        script.scriptCompress = compConf.get("paths/gzip", False)

        # Read in settings
        settings = self.getSettings()
        script.settings = settings

        # Read libraries
        libs = self._job.get("library", [])

        # Get translation maps
        locales = compConf.get("code/locales", [])
        translationMaps = self.getTranslationMaps(packages, variants, locales)

        # Read in base file name
        fileRelPath = getOutputFile(compileType)
        filePath    = self._config.absPath(fileRelPath)
        script.baseScriptPath = filePath

        if compileType == "build":
            # read in uri prefixes
            scriptUri = compConf.get('uris/script', 'script')
            scriptUri = Path.posifyPath(scriptUri)
            fileUri   = getFileUri(scriptUri)
            # for resource list
            resourceUri = compConf.get('uris/resource', 'resource')
            resourceUri = Path.posifyPath(resourceUri)
        else:
            # source version needs place where the app HTML ("index.html") lives
            self.approot = self._config.absPath(compConf.get("paths/app-root", ""))
            resourceUri = None
            scriptUri   = None

        # Get global script data (like qxlibraries, qxresources,...)
        globalCodes                = {}
        globalCodes["Settings"]    = settings
        globalCodes["Variants"]    = self.generateVariantsCode(variants)
        globalCodes["Libinfo"]     = self.generateLibInfoCode(libs, format, resourceUri, scriptUri)
        # add synthetic output lib
        if scriptUri:
            out_sourceUri = scriptUri
        else:
            out_sourceUri = self._computeResourceUri({'class': ".", 'path': os.path.dirname(script.baseScriptPath)}, OsPath(""), rType="class", appRoot=self.approot)
            out_sourceUri = os.path.normpath(out_sourceUri.encodedValue())
        globalCodes["Libinfo"]['__out__'] = { 'sourceUri': out_sourceUri }
        globalCodes["Resources"]    = self.generateResourceInfoCode(script, settings, libraries, format)
        globalCodes["Translations"],\
        globalCodes["Locales"]      = mergeTranslationMaps(translationMaps)

        # Potentially create dedicated I18N packages
        i18n_as_parts = not self._job.get("packages/i18n-with-boot", True)
        if i18n_as_parts:
            script = self.generateI18NParts(script, globalCodes)
            self.writePackages([p for p in script.packages if getattr(p, "__localeflag", False)], script)

        if compileType == "build":

            # - Specific job config ---------------------
            # read in compiler options
            optimize = compConf.get("code/optimize", [])
            self._treeCompiler.setOptimize(optimize)

            # - Generating packages ---------------------
            self._console.info("Generating packages...")
            self._console.indent()

            bootPackage = ""
            for packageIndex, package in enumerate(packages):
                package.compiled = compilePackage(packageIndex, package)

            self._console.outdent()
            if not len(packages):
                raise RuntimeError("No valid boot package generated.")

            # - Put loader and packages together -------
            loader_with_boot = self._job.get("packages/loader-with-boot", True)
            # handle loader and boot package
            if not loader_with_boot:
                loadPackage = Package(0)            # make a dummy Package for the loader
                packages.insert(0, loadPackage)

            # attach file names (do this before calling generateBootScript)
            for package, fileName in zip(packages, self.packagesFileNames(script.baseScriptPath, len(packages))):
                package.file = os.path.basename(fileName)
                if self._job.get("compile-options/paths/scripts-add-hash", False):
                    package.file = self._fileNameWithHash(package.file, package.hash)

            # generate and integrate boot code
            if loader_with_boot:
                # merge loader code with first package
                bootCode = generateBootScript(globalCodes, script, packages[0].compiled)
                packages[0].compiled = bootCode
            else:
                loaderCode = generateBootScript(globalCodes, script)
                packages[0].compiled = loaderCode

            # write packages
            self.writePackages(packages, script)

        # ---- 'source' version ------------------------------------------------
        else:

            sourceContent = generateBootScript(globalCodes, script, bootPackage="", compileType=compileType)

            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings)

            # Save result file
            filetool.save(resolvedFilePath, sourceContent)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, sourceContent)

            self._console.outdent()
            self._console.debug("Done: %s" % self._computeContentSize(sourceContent))
            self._console.outdent()

        self._console.outdent()

        return  # runCompiled()
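mergeTranslationMaps() folds a list of per-package (po-data, cldr-data) pairs into two global per-locale dicts, with later entries winning on key collisions. An equivalent compact sketch using setdefault (the sample data is invented):

def merge_maps(pairs):
    po_data, cldr_data = {}, {}
    for po_part, cldr_part in pairs:
        for loc, entries in po_part.items():
            po_data.setdefault(loc, {}).update(entries)
        for loc, entries in cldr_part.items():
            cldr_data.setdefault(loc, {}).update(entries)
    return po_data, cldr_data

print(merge_maps([
    ({"en": {"key_hello": "Hello"}}, {"en": {"dayName": "Monday"}}),
    ({"en": {"key_bye": "Bye"}},     {}),
]))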
Beispiel #43
0
    def updateTranslations(self, namespace, translationDir, localesList=None):

        def parsePOEntryStrings(poset):
            for poentry in poset:
                poentry.msgid        = self.parseAsUnicodeString(poentry.msgid)
                poentry.msgid_plural = self.parseAsUnicodeString(poentry.msgid_plural)
                if poentry.msgstr_plural:
                    for pos in poentry.msgstr_plural:
                        poentry.msgstr_plural[pos] = self.parseAsUnicodeString(poentry.msgstr_plural[pos])

        def unescapeMsgIds(poset):
            for poentry in poset:
                if poentry.msgid.find(r'\\') > -1:
                    poentry.msgid = self.recoverBackslashEscapes(poentry.msgid)

        # ----------------------------------------------------------------------

        self._console.info("Updating namespace: %s" % namespace)
        self._console.indent()
        
        self._console.debug("Looking up relevant class files...")
        classList = []
        classes = self._classesObj
        for classId in classes:
            if classes[classId].library.namespace == namespace:
                classList.append(classId)
                    
        self._console.debug("Compiling filter...")
        pot = self.getPotFile(classList)  # pot: translation keys from the source code
        pot.sort()

        allLocales = self._translation[namespace]
        if localesList is None:
            selectedLocales = allLocales.keys()
        else:
            selectedLocales = localesList
            for locale in selectedLocales:
                if locale not in allLocales:
                    path = os.path.join(translationDir, locale + ".po")
                    f    = open(path, 'w')  # create stanza file
                    pof  = self.createPoFile()
                    f.write(str(pof))
                    f.close()
                    allLocales[locale] = Library.translationEntry(locale, path, namespace)

        self._console.info("Updating %d translations..." % len(selectedLocales))
        self._console.indent()

        for locale in selectedLocales:
            self._console.debug("Processing: %s" % locale)
            self._console.indent()

            entry = allLocales[locale]
            po = polib.pofile(entry["path"])  # po: .po file from disk
            po.merge(pot)
            po.sort()
            self._console.debug("Percent translated: %d" % (po.percent_translated(),))
            #po.save(entry["path"])
            poString = str(po)
            #poString = self.recoverBackslashEscapes(poString)
            filetool.save(entry["path"], poString)
            self._console.outdent()

        self._console.outdent()
        self._console.outdent()
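For reference, a self-contained sketch of the polib round trip used above; the catalog file names are invented:

    # Self-contained sketch of the polib round trip used above
    # (file names are invented):
    import polib
    pot = polib.pofile("messages.pot")   # msgids extracted from the source
    po  = polib.pofile("de.po")          # existing catalog from disk
    po.merge(pot)                        # add new keys, mark obsolete ones
    po.sort()
    print "Percent translated: %d" % po.percent_translated()
    filetool.save("de.po", str(po))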
Beispiel #45
0
def writeDotFile(gr1, depsLogConf):
    file = depsLogConf.get('dot/file', "deps.dot")
    dot = gr1.write(fmt='dotwt')
    console.info("Writing dependency graph to file: %s" % file)
    filetool.save(file, dot)
    return
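gr1 and its write() come from the surrounding tool chain; a dependency-free sketch that emits comparable DOT output by hand:

# Dependency-free sketch: build DOT text from an edge list and save it
# (writeDotFile above relies on the tool chain's graph object instead).
def writeDotFileSimple(edges, filePath="deps.dot"):
    lines = ["digraph deps {"]
    for fromNode, toNode in edges:
        lines.append('    "%s" -> "%s";' % (fromNode, toNode))
    lines.append("}")
    filetool.save(filePath, "\n".join(lines))
    return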
Beispiel #46
0
    def storeApi(self, include, apiPath, variantSet, verify):
        self._console.info("Generating API data...")
        self._console.indent()

        docTree = tree.Node("doctree")
        docTree.set("fullName", "")
        docTree.set("name", "")
        docTree.set("packageName", "")
        length = len(include)

        self._console.info("Loading class docs...", False)
        self._console.indent()

        packages = []
        AttachMap = {}
        hasErrors = False
        for pos, fileId in enumerate(include):
            self._console.progress(pos+1, length)
            fileApi, attachMap = self.getApi(fileId, variantSet)
            if fileApi is None:
                hasErrors = True
            
            # Only continue merging if there were no errors
            if not hasErrors:
                # update AttachMap
                for cls in attachMap: # 'qx.Class', 'qx.core.Object', 'q', ...
                    if cls not in AttachMap:
                        AttachMap[cls] = attachMap[cls]
                    else:
                        for section in attachMap[cls]:  # 'statics', 'members'
                            if section not in AttachMap[cls]:
                                AttachMap[cls][section] = attachMap[cls][section]
                            else:
                                for method in attachMap[cls][section]:  # 'define', 'showToolTip', ...
                                    if method not in AttachMap[cls][section]:
                                        AttachMap[cls][section][method] = attachMap[cls][section][method]
                                    else:
                                        self._console.warn("Multiple @attach for same target '%s::%s#%s'." % (cls, section, method))

                self._mergeApiNodes(docTree, fileApi)
                pkgId = self._classesObj[fileId].package
                # make sure all parent packages are included
                nsparts = pkgId.split('.')
                for i in range(len(nsparts)+1):
                    parentPkg = ".".join(nsparts[0:i])
                    if parentPkg not in packages:
                        packages.append(parentPkg)

        self._console.outdent()

        if hasErrors:
            self._console.error("Found erroneous API information. Please see above. Stopping!")
            return
                
        self._console.info("Loading package docs...")
        self._console.indent()
        
        packages.sort()
        for pkgId in packages:
            self._mergeApiNodes(docTree, self.getPackageApi(pkgId))

        self._console.outdent()

        self._console.info("Connecting classes...")
        api.connectPackage(docTree, docTree)

        self._console.info("Generating search index...")
        index = self.docTreeToSearchIndex(docTree, "", "", "")
        
        if verify and "links" in verify:
            self.verifyLinks(docTree, index)
        
        self._console.info("Saving data...", False)
        self._console.indent()

        packageData = api.getPackageData(docTree)
        packageJson = json.dumps(packageData)
        filetool.save(os.path.join(apiPath, "apidata.json"), packageJson)

        # apply the @attach information
        for classData in api.classNodeIterator(docTree):
            className = classData.get("fullName")
            if className in AttachMap:
                self._applyAttachInfo(className, classData, AttachMap[className])
        
        # write per-class .json to disk
        length = 0
        for classData in api.classNodeIterator(docTree):
            length += 1
            
        pos = 0
        for classData in api.classNodeIterator(docTree):
            pos += 1
            self._console.progress(pos, length)
            nodeData = tree.getNodeData(classData)
            nodeJson = json.dumps(nodeData)
            fileName = os.path.join(apiPath, classData.get("fullName") + ".json")
            filetool.save(fileName, nodeJson)
            
        self._console.outdent()
            
        # write apiindex.json
        self._console.info("Saving index...")
        indexContent = json.dumps(index, separators=(',',':'), sort_keys=True) # compact encoding
        filetool.save(os.path.join(apiPath, "apiindex.json"), indexContent)            

        self._console.outdent()
        self._console.info("Done")
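Assuming the layout written above (apidata.json, apiindex.json and one .json file per class under apiPath), a short sketch of reading the data back:

# Sketch: read the generated API files back in; assumes the file layout
# written above and that filetool.read takes an encoding argument, as it
# does elsewhere in these examples.
def loadApiData(apiPath):
    packageData = json.loads(filetool.read(os.path.join(apiPath, "apidata.json"), "utf-8"))
    index = json.loads(filetool.read(os.path.join(apiPath, "apiindex.json"), "utf-8"))
    return packageData, index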
Beispiel #47
0
def CreateDemoJson():

    source = []
    build = []
    scategories = {}
    bcategories = {}

    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = 'tool/default.json'
    assert os.path.isfile(default_json)
    JSON['include'] = [{"path": "%s" % default_json}]

    # per-demo template file
    json_tmpl = open(os.path.join('tool', 'tmpl.json'), "rU").read()

    # jobs section
    JSON['jobs'] = {}

    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html is None:  # terminate the generator part and go to finalizing json file
            break

        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)

        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path": "%s" % config_file})

        # build classname
        simple = "%s.%s" % (category, name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if category not in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple, ))
        if category not in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple, ))

        # concat all
        currcont = json_tmpl.replace('XXX',
                                     "%s.%s" % (category, name)).replace(
                                         "YYY", name).replace("ZZZ", category)
        templatejobs = json.loads("{" + currcont + "}")
        for job, jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval

    # Post-processing
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])

    JSON['jobs']["source"] = {"run": sorted(source)}
    JSON['jobs']["build"] = {"run": sorted(build)}

    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(fJSON, cont)

    yield  # final yield to provide for .send(None) of caller
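Since CreateDemoJson is a coroutine, a caller presumably drives it along these lines (demo_html_files is a made-up name):

# Sketch of driving the coroutine above (demo_html_files is made up):
gen = CreateDemoJson()
gen.next()                 # prime the generator (Python 2)
for html in demo_html_files:
    gen.send(html)         # feed one demo .html file at a time
gen.send(None)             # end of input: finalize and write the json file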
Beispiel #48
0
def runImageCombining(jobconf, confObj):

    def extractFromPrefixSpec(prefixSpec):
        prefix = altprefix = ""
        if not prefixSpec or not isinstance(prefixSpec, types.ListType):
            if jobconf.get("config-warnings/combine-images", True):
                console.warn("Missing or incorrect prefix spec, might lead to incorrect resource id's.")
        elif len(prefixSpec) == 2 :  # prefixSpec = [ prefix, altprefix ]
            prefix, altprefix = prefixSpec
        elif len(prefixSpec) == 1:
            prefix            = prefixSpec[0]
            altprefix         = ""
        return prefix, altprefix

    ##
    # strip prefix - if available - from imagePath, and replace by altprefix
    def getImageId(imagePath, prefixSpec):
        prefix, altprefix = extractFromPrefixSpec(prefixSpec)
        imageId = imagePath # init
        _, imageId, _ = Path.getCommonPrefix(imagePath, prefix) # assume: imagePath = prefix "/" imageId
        if altprefix:
            imageId   = altprefix + "/" + imageId

        imageId = Path.posifyPath(imageId)
        return imageId

    ##
    # create a dict with the clipped image file path as key, and prefix elements as value
    def getClippedImagesDict(imageSpec):
        imgDict = {}
        inputStruct = imageSpec['input']
        for group in inputStruct:
            prefixSpec = group.get('prefix', [])
            prefix, altprefix = extractFromPrefixSpec(prefixSpec)
            if prefix:
                prefix = confObj.absPath(prefix)
            for filepatt in group['files']:
                num_files = 0
                for file in glob.glob(confObj.absPath(filepatt)):  # resolve file globs - TODO: can be removed in generator.action.ImageClipping
                    console.debug("adding image %s" % file)
                    imgDict[file]    = [prefix, altprefix]
                    num_files       += 1
                if num_files == 0:
                    raise ValueError("Non-existing file spec: %s" % filepatt)

        return imgDict

    # ----------------------------------------------------------------------

    if not jobconf.get("combine-images", False):
        return
    
    console = Context.console
    cache = Context.cache

    console.info("Combining images...")
    console.indent()
    imageClipper = ImageClipping(console, cache, jobconf)

    images = jobconf.get("combine-images/images", {})
    for image, imgspec in images.iteritems():
        console.info("Creating image %s" % image)
        console.indent()
        imageId= getImageId(image, imgspec.get('prefix', []))
        image  = confObj.absPath(image)  # abs output path
        config = {}

        # create a dict of clipped image objects - for later look-up
        clippedImages = getClippedImagesDict(imgspec)

        # collect list of all input files, no matter where they come from
        input = sorted(clippedImages.keys())

        # collect layout property
        if 'layout' in imgspec:
            layout = imgspec['layout'] == "horizontal"
        else:
            layout = "horizontal" == "horizontal" # default horizontal=True

        # get type of combined image (png, base64, ...)
        combtype = "base64" if image.endswith(".b64.json") else "extension"

        # create the combined image
        subconfigs = imageClipper.combine(image, input, layout, combtype)

        # for the meta information, go through the list of returned subconfigs (one per clipped image)
        for sub in subconfigs:
            x = Image()
            x.combId, x.left, x.top, x.width, x.height, x.format = (
               imageId, sub['left'], sub['top'], sub['width'], sub['height'], sub['type'])
            subId = getImageId(sub['file'], clippedImages[sub['file']])
            config[subId] = x.toMeta()

        # store meta data for this combined image
        bname = os.path.basename(image)
        ri = bname.rfind('.')
        if ri > -1:
            bname = bname[:ri]
        bname += '.meta'
        meta_fname = os.path.join(os.path.dirname(image), bname)
        console.debug("writing meta file %s" % meta_fname)
        filetool.save(meta_fname, json.dumps(config, ensure_ascii=False, sort_keys=True))
        console.outdent()

        # handle base64 type, need to write "combined image" to file
        if combtype == "base64":
            combinedMap = {}
            for sub in subconfigs:
                subMap = {}
                subId  = getImageId(sub['file'], clippedImages[sub['file']])
                subMap['width']    = sub['width']
                subMap['height']   = sub['height']
                subMap['type']     = sub['type']
                subMap['encoding'] = sub['encoding']
                subMap['data']     = sub['data']
                combinedMap[subId] = subMap
            filetool.save(image, json.dumpsCode(combinedMap))

    console.outdent()

    return
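Reconstructed from the keys this function reads, a combine-images job configuration might look like this (all paths are invented):

# Shape of the job config this function consumes, reconstructed from the
# keys it reads; all paths are invented.
combine_images_job = {
    "combine-images": {
        "images": {
            "build/resource/app/combined.png": {
                "prefix": ["source/resource", "app"],
                "layout": "horizontal",
                "input": [{
                    "prefix": ["source/resource", "app"],
                    "files": ["source/resource/app/icons/*.png"]
                }]
            }
        }
    }
}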
Beispiel #49
0
def write_new(config):
    conf_str = json.dumpsPretty(config.getData())
    filetool.save(configPath, conf_str)
    return
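json.dumpsPretty here appears to be a wrapper from the tool chain's json module (as is json.dumpsCode above); with plain stdlib json the same write might read:

# Equivalent write with stdlib json (configPath comes from the enclosing
# scope, as in write_new above).
def write_new_plain(config):
    conf_str = json.dumps(config.getData(), sort_keys=True, indent=2)
    filetool.save(configPath, conf_str)
    return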