def patchConfig(configPath, migVersions):
    """Apply migration transformations to the JSON config at *configPath*.

    For every version string in *migVersions* that ships a ``config.py``
    migration script, execute it and apply its ``transformations`` mapping
    to the loaded config.  The file is backed up (once, to ``*.orig``) and
    rewritten only when at least one transformation changed something.

    :param configPath: path of the JSON config file to migrate
    :param migVersions: iterable of version strings to apply, in order
    """

    def loadConfpy(confpyPath):
        # Execute the migration script (Python 2 execfile) and return its
        # global namespace, which is expected to define "transformations".
        namespace = {}
        execfile(confpyPath, namespace)
        return namespace

    def handleMap(extmap, key, action):
        # Apply one transformation <action> to <key> of an ExtMap:
        #  - string action: rename the key ("" means delete)
        #  - function action: call it with (map, key, current value)
        changed = False
        # string-value action
        if key in extmap:
            if isinstance(action, types.StringTypes): # it's a rename
                if action == "": # it's a delete
                    extmap.delete(key)
                else:
                    extmap.rename(key, action)
            # function-value action
            elif isinstance(action, types.FunctionType):
                currval = extmap.get(key)
                action(extmap, key, currval)
            # NOTE(review): the flag is set whenever the key exists, even if
            # the action matched neither branch — confirm this is intended.
            changed = True
        return changed

    def applyToConfig(config, confpy):
        # Run every transformation of one migration script against all jobs.
        # load conf.py
        confMig = loadConfpy(confpy)
        changed = False
        for key, action in confMig["transformations"].items():
            if not key.startswith("/"): # job-level key
                for job in config.get("jobs").values():
                    job = ExtMap(job)
                    changed = handleMap(job, key, action) or changed
        return changed

    def write_new(config):
        # Serialize the (patched) config and overwrite the original file.
        conf_str = json.dumpsPretty(config.getData())
        filetool.save(configPath, conf_str)
        return

    def write_backup(configPath):
        import shutil
        # One-time backup: never clobber an existing .orig file.
        configPathBackup = configPath + ".orig"
        if not os.path.exists(configPathBackup):
            shutil.copy(configPath, configPathBackup)
        return

    def get_confpy(vers):
        # Return the path of this version's migration config.py, or None
        # when the version ships no config migration.
        res = None
        versionPatchPath = os.path.join(getPatchDirectory(), vers)
        # require a potential config.py in the root directory
        if os.path.exists(versionPatchPath + '/' + "config.py"):
            res = os.path.join(versionPatchPath, "config.py")
        return res

    # --------------------------------------------------------------------------

    # get current config
    config = json.loadStripComments(configPath)
    config = ExtMap(config)

    # apply migration files
    changed = False
    for vers in migVersions:
        confpy = get_confpy(vers)
        if confpy:
            changed = applyToConfig(config, confpy) or changed

    # write new config
    if changed:
        # backup old config
        write_backup(configPath)
        # write new config
        write_new(config)

    return
def __init_fname(self, fname):
    """Load the JSON config file *fname*, annotating parse errors.

    On a JSON parse failure the ValueError's message is extended with the
    offending file path, so the user can tell which file is broken, and
    the exception is re-raised.

    :param fname: path of the JSON file to load
    :raises ValueError: re-raised from parsing, with the file name appended
    """
    try:
        # NOTE(review): `data` is unused in this visible chunk — presumably
        # consumed/returned by the full method; kept for that reason.
        data = json.loadStripComments(fname)
    except ValueError as e:
        # Append the file name to the first message argument.
        e.args = (e.args[0] + "\nFile: %s" % fname, ) + e.args[1:]
        # Fix: bare `raise` re-raises with the ORIGINAL traceback; the old
        # `raise e` rebuilt the traceback at this line under Python 2.
        raise
def patchConfig(configPath, migVersions):
    """Apply migration transformations to the JSON config at *configPath*.

    For every version string in *migVersions* that provides a ``config.py``
    migration script, execute it and apply its ``transformations`` mapping
    to the loaded config.  The original file is backed up once (``*.orig``)
    and rewritten only if something actually changed.

    :param configPath: path of the JSON config file to migrate
    :param migVersions: iterable of version strings to apply, in order
    """

    def loadConfpy(confpyPath):
        # Execute the migration script (Python 2 execfile) and return its
        # global namespace; it is expected to define "transformations".
        namespace = {}
        execfile(confpyPath, namespace)
        return namespace

    def handleMap(extmap, key, action):
        # Apply one transformation <action> to <key> of an ExtMap:
        #  - string action: rename the key ("" means delete)
        #  - function action: call it with (map, key, current value)
        changed = False
        # string-value action
        if key in extmap:
            if isinstance(action, types.StringTypes): # it's a rename
                if action == "": # it's a delete
                    extmap.delete(key)
                else:
                    extmap.rename(key, action)
            # function-value action
            elif isinstance(action, types.FunctionType):
                currval = extmap.get(key)
                action(extmap, key, currval)
            # NOTE(review): flag set whenever the key exists, even when the
            # action matched neither branch — confirm this is intended.
            changed = True
        return changed

    def applyToConfig(config, confpy):
        # Run every transformation of one migration script over all jobs.
        # load conf.py
        confMig = loadConfpy(confpy)
        changed = False
        for key, action in confMig["transformations"].items():
            if not key.startswith("/"): # job-level key
                for job in config.get("jobs").values():
                    job = ExtMap(job)
                    changed = handleMap(job, key, action) or changed
        return changed

    def write_new(config):
        # Serialize the (patched) config and overwrite the original file.
        conf_str = json.dumpsPretty(config.getData())
        filetool.save(configPath, conf_str)
        return

    def write_backup(configPath):
        import shutil
        # One-time backup: never clobber an existing .orig file.
        configPathBackup = configPath + ".orig"
        if not os.path.exists(configPathBackup):
            shutil.copy(configPath, configPathBackup)
        return

    def get_confpy(vers):
        # Return the path of this version's migration config.py, or None
        # when the version ships no config migration.
        res = None
        versionPatchPath = os.path.join(getPatchDirectory(), vers)
        # require a potential config.py in the root directory
        if os.path.exists(versionPatchPath + '/' + "config.py"):
            res = os.path.join(versionPatchPath, "config.py")
        return res

    # --------------------------------------------------------------------------

    # get current config
    config = json.loadStripComments(configPath)
    config = ExtMap(config)

    # apply migration files
    changed = False
    for vers in migVersions:
        confpy = get_confpy(vers)
        if confpy:
            changed = applyToConfig(config, confpy) or changed

    # write new config
    if changed:
        # backup old config
        write_backup(configPath)
        # write new config
        write_new(config)

    return
def CreateDemoJson():
    """Coroutine that aggregates demo html files into a generator JSON config.

    Protocol (PEP 342 style): prime the generator, then ``.send(html_path)``
    once per demo file; ``.send(None)`` terminates the collection loop,
    writes the aggregated config to the module-level ``fJSON``, and hits a
    final ``yield`` so the caller's last send() does not raise StopIteration.
    """
    source = []
    build = []
    scategories = {}
    bcategories = {}

    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = 'tool' + '/' + 'default.json'
    assert os.path.isfile(default_json)
    JSON['include'] = [{ "path" : "%s" % default_json }]
    # per-demo template file
    json_tmpl = open(os.path.join('tool','tmpl.json'),"rU").read()
    # jobs section
    JSON['jobs'] = {}
    # allow exported jobs to be shadowed
    JSON['config-warnings'] = {}
    shadowed_jobs = []
    # list is mutated in the loop below; the dict keeps a live reference
    JSON['config-warnings']['job-shadowing'] = shadowed_jobs

    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html == None: # terminate the generator part and go to finalizing json file
            break
        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)

        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path":"%s" % config_file})
            demo_config = json.loadStripComments(config_file)
            shadowed_jobs.extend(demo_config['export'])

        # build classname
        simple = "%s.%s" % (category,name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple,))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple,))
        # concat all: fill the per-demo job template placeholders
        currcont = json_tmpl.replace('XXX',"%s.%s"%(category,name)).replace("YYY",name).replace("ZZZ",category)
        templatejobs = json.loads("{" + currcont + "}")
        for job,jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval

    # Post-processing: per-category and global aggregate jobs
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])
    JSON['jobs']["source"] = { "run" : sorted(source) }
    JSON['jobs']["build"] = { "run" : sorted(build) }

    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(fJSON, cont)

    yield # final yield to provide for .send(None) of caller
def __init_fname(self, fname):
    """Load the JSON config file *fname*, annotating parse errors.

    On a JSON parse failure the ValueError's message is extended with the
    offending file path, so the user can tell which file is broken, and
    the exception is re-raised.

    :param fname: path of the JSON file to load
    :raises ValueError: re-raised from parsing, with the file name appended
    """
    try:
        # NOTE(review): `data` is unused in this visible chunk — presumably
        # consumed/returned by the full method; kept for that reason.
        data = json.loadStripComments(fname)
    except ValueError as e:
        # Append the file name to the first message argument.
        e.args = (e.args[0] + "\nFile: %s" % fname,) + e.args[1:]
        # Fix: bare `raise` re-raises with the ORIGINAL traceback; the old
        # `raise e` rebuilt the traceback at this line under Python 2.
        raise
def CreateDemoJson(dest, qxdir):
    """Coroutine that aggregates demo html files into a generator JSON config.

    Side effects before the collection loop: writes Manifest.json and
    config.json (derived from the tmpl.* templates next to the script) two
    directories above *dest*.

    Protocol (PEP 342 style): prime the generator, then ``.send(html_path)``
    once per demo file; ``.send(None)`` terminates the loop, writes the
    aggregated config to ``demobrowser/<fJSON>``, and hits a final ``yield``
    so the caller's last send() does not raise StopIteration.

    :param dest: destination directory the Manifest/config paths are based on
    :param qxdir: qooxdoo SDK directory, baked into QOOXDOO_PATH of config.json
    """
    source = []
    build = []
    scategories = {}
    bcategories = {}

    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])) , 'default.json')
    assert os.path.isfile(default_json)
    JSON['include'] = [{ "path" : "%s" % default_json }]
    # per-demo template file
    tmpl_json = os.path.join(os.path.dirname(sys.argv[0]) , 'tmpl.json')
    tmpl_manifest = os.path.join(os.path.dirname(sys.argv[0]) , TMPL_MANIFEST)
    tmpl_config = os.path.join(os.path.dirname(sys.argv[0]) , TMPL_CONFIG)
    json_tmpl = open(tmpl_json,"rU").read()
    # NOTE(review): `namespace` is a module-level free variable here
    demo_ns = "%s.demo" % namespace
    # instantiate the Manifest/config templates for this demo namespace
    manifest_tmpl = json.loads(open(tmpl_manifest, 'rU').read())
    manifest_tmpl['provides']['namespace'] = demo_ns
    config_tmpl = json.loads(open(tmpl_config, 'rU').read())
    config_tmpl['let']['QOOXDOO_PATH'] = os.path.join('..', qxdir)
    config_tmpl['jobs']['source-demos']['let']['APPLICATION'] = demo_ns
    # strip the 'tmpl.' prefix to get the real output file name
    fn = os.path.basename(tmpl_manifest)[len('tmpl.'):] # file name
    open(os.path.join(dest, '..', '..', fn), 'w').write(json.dumps(manifest_tmpl, indent=2, sort_keys=True))
    fn = os.path.basename(tmpl_config)[len('tmpl.'):]
    open(os.path.join(dest, '..', '..', fn), 'w').write(json.dumps(config_tmpl, indent=2, sort_keys=True))
    # jobs section
    JSON['jobs'] = {}
    # allow exported jobs to be shadowed
    JSON['config-warnings'] = {}
    shadowed_jobs = []
    # list is mutated in the loop below; the dict keeps a live reference
    JSON['config-warnings']['job-shadowing'] = shadowed_jobs

    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html == None: # terminate the generator part and go to finalizing json file
            break
        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)

        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path":"%s" % config_file})
            demo_config = json.loadStripComments(config_file)
            shadowed_jobs.extend(demo_config['export'])

        # build classname
        simple = "%s.%s" % (category,name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple,))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple,))
        # concat all: fill the per-demo job template placeholders
        currcont = json_tmpl.replace('XXX',"%s.%s"%(category,name)).replace("YYY",name).replace("ZZZ",category)
        templatejobs = json.loads("{" + currcont + "}")
        for job,jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval

    # Post-processing: per-category and global aggregate jobs
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])
    JSON['jobs']["source"] = { "run" : sorted(source) }
    JSON['jobs']["build"] = { "run" : sorted(build) }

    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(os.path.join('demobrowser', fJSON), cont)

    yield # final yield to provide for .send(None) of caller
def CreateDemoJson():
    """Coroutine that aggregates demo html files into a generator JSON config.

    Protocol (PEP 342 style): prime the generator, then ``.send(html_path)``
    once per demo file; ``.send(None)`` terminates the collection loop,
    writes the aggregated config to the module-level ``fJSON``, and hits a
    final ``yield`` so the caller's last send() does not raise StopIteration.
    """
    source = []
    build = []
    scategories = {}
    bcategories = {}

    # Pre-processing
    JSON = {}
    # top-level includes
    default_json = 'tool' + '/' + 'default.json'
    assert os.path.isfile(default_json)
    JSON['include'] = [{"path": "%s" % default_json}]
    # per-demo template file
    json_tmpl = open(os.path.join('tool', 'tmpl.json'), "rU").read()
    # jobs section
    JSON['jobs'] = {}
    # allow exported jobs to be shadowed
    JSON['config-warnings'] = {}
    shadowed_jobs = []
    # list is mutated in the loop below; the dict keeps a live reference
    JSON['config-warnings']['job-shadowing'] = shadowed_jobs

    # Process demo html files
    while True:
        html = (yield)
        #print html
        if html == None: # terminate the generator part and go to finalizing json file
            break
        category, name = demoCategoryFromFile(html)
        #print ">>> Processing: %s.%s..." % (category, name)

        # check for demo-specific config file
        config_file = os.path.splitext(html)[0] + ".json"
        if os.path.exists(config_file):
            JSON['include'].append({"path": "%s" % config_file})
            demo_config = json.loadStripComments(config_file)
            shadowed_jobs.extend(demo_config['export'])

        # build classname
        simple = "%s.%s" % (category, name)
        source.append("source-%s" % simple)
        build.append("build-%s" % simple)
        if not category in scategories:
            scategories[category] = []
        scategories[category].append("source-%s" % (simple, ))
        if not category in bcategories:
            bcategories[category] = []
        bcategories[category].append("build-%s" % (simple, ))
        # concat all: fill the per-demo job template placeholders
        currcont = json_tmpl.replace('XXX', "%s.%s" % (category, name)).replace(
            "YYY", name).replace("ZZZ", category)
        templatejobs = json.loads("{" + currcont + "}")
        for job, jobval in templatejobs.iteritems():
            JSON['jobs'][job] = jobval

    # Post-processing: per-category and global aggregate jobs
    for category in scategories:
        currentry = JSON['jobs']["source-%s" % category] = {}
        currentry['run'] = sorted(scategories[category])
        currentry = JSON['jobs']["build-%s" % category] = {}
        currentry['run'] = sorted(bcategories[category])
    JSON['jobs']["source"] = {"run": sorted(source)}
    JSON['jobs']["build"] = {"run": sorted(build)}

    cont = '// This file is dynamically created by the generator!\n'
    cont += json.dumps(JSON, sort_keys=True, indent=2)
    filetool.save(fJSON, cont)

    yield # final yield to provide for .send(None) of caller