def debug_module(pkg_name, module_name):
    """Render a single module's docs to ./assets/debug.html for inspection."""
    packages = fetch("https://package.elm-lang.org/all-packages")
    package_info = packages[pkg_name]
    docs_url = "/".join([
        "https://package.elm-lang.org/packages",
        pkg_name,
        package_info[-1],  # presumably the latest published version -- confirm
        "docs.json",
    ])
    modules_by_name = {entry["name"]: entry for entry in fetch(docs_url)}
    module = Module(modules_by_name[module_name], pkg_name)
    rendered = toHtml(module.markdown).replace('<code>', '<code class="elm">')
    with open("./assets/debug.html", "wb") as out:
        out.write(moduleTemplate({
            "pkg_link": (pkg_name, "#"),
            "module_name": module.name,
            "markdown": rendered,
        }))
def generate_all(): global pkgs print ("feching all packages list ..."), all_pkgs = fetch(pkgsURL + "all-packages") print ("DONE!") print ("feching new packages list ..."), new_pkgs = fetch(pkgsURL + "new-packages") print ("DONE!") new_pkgs = list(set(new_pkgs)) all_pkgs_dict = {p["name"]: p for p in all_pkgs} deprecated = [p for p in all_pkgs_dict.iteritems() if not p in new_pkgs] pkgs = [p for p in all_pkgs if p["name"] in new_pkgs] pkgs.sort(key=lambda a: a["name"].lower()) # generate the index with open(opj(docpath, "index.html"), "w") as fo: fo.write(indexTemplate({"pkgs": [(pkg["name"], docname(pkg["name"]), pkg["summary"]) for pkg in pkgs]})) no_pkgs = len(pkgs) for pkg in pkgs: idx = pkgs.index(pkg) + 1 pkg_name = pkg["name"] pkg_file = docname(pkg_name) pkg_version = pkg["versions"][0] print "Generating package: " + pkg_name + " [% 3d / %03d]..." % (idx, no_pkgs), docURL = pkgsURL + "/".join(["packages", pkg_name, pkg_version, "documentation"]) + ".json" json = fetch(docURL) # module = Module(json) links = [] for module_json in json: moduleJsonURL = ( pkgsURL + "/".join(["packages", pkg_name, pkg_version, "docs", module_json["name"].replace(".", "-")]) + ".json" ) module = Module(fetch(moduleJsonURL), pkg_name) module_file = docname(pkg_name, module.name) links.append((module.name, module_file)) with open(opj(docpath, module_file), "w") as fo: html = toHtml(module.markdown).replace("<code>", '<code class="elm">') # fix syntax detection data = {"pkg_link": (pkg_name, pkg_file), "module_name": module.name, "markdown": html} fo.write(moduleTemplate(data)) cur.execute( "INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)", (module.name, "Module", module_file), ) with open(opj(docpath, pkg_file), "w") as fo: data = {"pkg_name": pkg_name, "modules": links, "version": pkg_version} fo.write(pkgTemplate(data)) cur.execute( "INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)", (pkg_name, "Package", pkg_file) ) print "DONE!"
def asset_url(self):
    """Resolve the asset's final URL, memoized in the cache for three hours."""
    def resolve():
        self.consume()
        return self.final

    key = '%s-url' % self.url
    url = cache.fetch(key, resolve, expires=3 * cache.TIME_HOUR)
    log.info(url)
    return url
def reqsLong(topic):
    """Return {'name', 'reqs'} of long-form prerequisites for *topic*.

    Results are cached under "<topic> longreqs"; a cache hit short-circuits
    the wiki lookup.
    """
    name = topic + " longreqs"
    fetched = fetch(name)
    if fetched:
        return fetched
    art = wiki.search(topic)
    reqs = getLongReqs(art)
    result = {'name': art.title, 'reqs': reqs}
    # Bug fix: store the computed result, as the sibling helpers
    # (prereqs/info/quiz/review) do; previously the cache was never
    # populated, so every call recomputed from the wiki.
    cache(name, result)
    return result
def debug_module(pkg_name, module_name):
    """Write one module's rendered docs to ./assetts/debug.html for inspection."""
    by_name = {p["name"]: p for p in fetch("http://package.elm-lang.org/all-packages")}
    pkg = by_name[pkg_name]
    docs_url = "/".join([
        "http://package.elm-lang.org/packages",
        pkg_name,
        pkg["versions"][0],
        "documentation.json",
    ])
    modules = {m["name"]: m for m in fetch(docs_url)}
    module = Module(modules[module_name], pkg_name)
    payload = {
        "pkg_link": (pkg_name, "#"),
        "module_name": module.name,
        "markdown": toHtml(module.markdown),
    }
    # NB: "assetts" spelling matches the existing asset directory path.
    with open("./assetts/debug.html", "w") as out:
        out.write(moduleTemplate(payload))
def file_name(self):
    """Return (and memoize) the asset's file name, cached for one day."""
    def compute():
        self.consume()
        return self.helper({'method': 'file_name', 'asset_url': self.asset_url()})

    if not self.fname:
        self.fname = cache.fetch('%s-fname' % self.url, compute, expires=cache.TIME_DAY)
    return self.fname
def asset_url(self):
    """Return the asset's final URL, using the cache with a three-hour TTL."""
    def resolve():
        self.consume()
        return self.final

    result = cache.fetch(self.url_cache_key, resolve, expires=cache.TIME_HOUR * 3)
    log.info(result)
    return result
def asset_url(self):
    """Look up (or compute and cache) the asset's final URL.

    The computed value lives in the cache for three hours under
    ``self.url_cache_key``.
    """
    def _final():
        self.consume()
        return self.final

    three_hours = 3 * cache.TIME_HOUR
    url = cache.fetch(self.url_cache_key, _final, expires=three_hours)
    log.info(url)
    return url
def file_name(self):
    """Return (and memoize) the asset's file name; cached for one day."""
    def get_fname():
        self.consume()
        # Bug fix: call asset_url() -- the original placed the bound method
        # object itself (not the resolved URL string) into the helper payload.
        # asset_url is defined as a plain method elsewhere in this class, and
        # the later revision of this method calls it.
        return self.helper({'method': 'file_name', 'asset_url': self.asset_url()})

    if not self.fname:
        ttl = cache.TIME_DAY
        self.fname = cache.fetch(self.file_cache_key, get_fname, expires=ttl)
        log.info(self.fname)
    return self.fname
def prereqs(topic):
    """Return the prerequisite topics for *topic*, caching the result."""
    key = topic + " prereqs"
    cached = fetch(key)
    if cached:
        return cached
    article = wiki.search(topic)
    reqs = getReqs(article)
    for req in reqs:
        # Return value discarded: review() acts purely as a cache warmer here.
        review(req)
    cache(key, reqs)
    return reqs
def info(topic):
    """Return {'name', 'text'} summary info for *topic*, caching the result."""
    key = topic + " info"
    cached = fetch(key)
    if cached:
        return cached
    article = wiki.search(topic)
    result = {
        'name': article.title,
        'text': article.sections[0].string,  # lead section only
    }
    cache(key, result)
    return result
def quiz(topic):
    """Build (or retrieve from cache) the quiz dict for *topic*."""
    key = topic + " quiz"
    cached = fetch(key)
    if cached:
        return cached
    article = wiki.search(topic)
    result = {
        'name': article.title,
        'description': getDescription(article),
        'distractors': getDists(article),
        'prereqs': getReqs(article),
    }
    cache(key, result)
    return result
def debug_module(pkg_name, module_name):
    """Render a single package module's docs into ./assetts/debug.html."""
    index = fetch("http://package.elm-lang.org/all-packages")
    pkg = {entry["name"]: entry for entry in index}[pkg_name]
    url_parts = [
        "http://package.elm-lang.org/packages",
        pkg_name,
        pkg["versions"][0],
        "documentation.json",
    ]
    docs = fetch("/".join(url_parts))
    wanted = {m["name"]: m for m in docs}[module_name]
    module = Module(wanted, pkg_name)
    with open("./assetts/debug.html", "w") as out:
        out.write(moduleTemplate({
            "pkg_link": (pkg_name, "#"),
            "module_name": module.name,
            "markdown": toHtml(module.markdown),
        }))
def review(topic):
    """Return review data for *topic*, caching the result.

    Raises DisambiguationError when the wiki article is a disambiguation page.
    """
    key = topic + " review"
    cached = fetch(key)
    if cached:
        return cached
    article = wiki.search(topic)
    if article.disambiguation:
        raise DisambiguationError()
    result = {
        'name': article.title,
        'description': getDescription(article),
        'distractors': getDists(article),
    }
    cache(key, result)
    return result
def __init__(self, endpoint, environment = environment.default, avoid_flv = False):
    # Initialize the wizard for a media endpoint.
    #
    # endpoint    -- identifier/URL of the resource to inspect
    # environment -- runtime environment providing str_to_json (defaults to
    #                the module-level environment.default)
    # avoid_flv   -- caller preference flag; only stored here, not acted on
    #                in this method
    super(Wizard, self).__init__()
    self.endpoint = endpoint
    self.file_hint = None
    self.avoid_flv = avoid_flv
    self.environment = environment
    try:
        def get_sources():
            # Invoked by cache.fetch only on a cache miss.
            return util.gzip_request(util.sources_endpoint(self.endpoint))
        # Sources are cached for half an hour; the JSON is re-parsed each time.
        self.payload = self.environment.str_to_json(cache.fetch('%s-sources' % self.endpoint, get_sources, expires = cache.TIME_HOUR / 2))
        self.file_hint = self.payload['resource']['display_title']
    except Exception, e:
        # Deliberate best-effort swallow: on any failure file_hint stays None.
        # NOTE(review): self.payload is left unset on failure -- confirm
        # callers guard against that.
        #util.print_exception(e)
        pass
def file_name(self):
    """Return (and memoize) the asset's file name; cached for one day."""
    def get_fname():
        self.consume()
        # Bug fix: invoke asset_url() -- previously the bound method object
        # itself, rather than the resolved URL string, was placed in the
        # payload sent to helper(). asset_url is a plain method elsewhere in
        # this class, and the sibling revision of file_name calls it.
        return self.helper({'method': 'file_name', 'asset_url': self.asset_url()})

    if not self.fname:
        ttl = cache.TIME_DAY
        self.fname = cache.fetch(self.file_cache_key, get_fname, expires=ttl)
        log.info(self.fname)
    return self.fname
def generate_all():
    """Fetch the Elm package index and generate all docset pages.

    Writes index.html plus one page per package and per module under
    ``docpath``, and populates the ``searchIndex`` table through the
    module-level cursor ``cur``.
    """
    global pkgs
    print("feching all packages list ...")
    all_pkgs = requests.get(pkgsURL + "search.json").json()
    print("DONE!")
    pkgs = sorted(all_pkgs, key=lambda a: a["name"].lower())

    # generate the index
    with open(opj(docpath, "index.html"), "wb") as fo:
        fo.write(indexTemplate({"pkgs": [(pkg["name"], docname(pkg["name"]), pkg["summary"])
                                         for pkg in pkgs]}))

    no_pkgs = len(pkgs)
    for idx, pkg in enumerate(pkgs, 1):  # enumerate avoids O(n^2) pkgs.index()
        pkg_name = pkg["name"]
        pkg_file = docname(pkg_name)
        try:
            pkg_version = pkg["version"]
        except KeyError:
            # Bug fix: dict subscript raises KeyError, not IndexError, so the
            # old handler could never catch a missing "version" field.
            print("No version found, skipping package: %s" % pkg_name)
            continue
        print("Generating package: " + pkg_name + " [% 3d / %03d]..." % (idx, no_pkgs), end="")
        json = fetch(pkgsURL + "/".join(["packages", pkg_name, pkg_version, "docs"]) + ".json")
        links = []
        for module_json in json:
            module = Module(module_json, pkg_name)
            module_file = docname(pkg_name, module.name)
            links.append((module.name, module_file))
            with open(opj(docpath, module_file), "wb") as fo:
                # fix syntax detection for the highlighter
                html = toHtml(module.markdown).replace('<code>', '<code class="elm">')
                data = {
                    "pkg_link": (pkg_name, pkg_file),
                    "module_name": module.name,
                    "markdown": html,
                    "pkg_name": pkg_name,
                    "version": pkg_version}
                fo.write(moduleTemplate(data))
            cur.execute('INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)',
                        (module.name + ' (' + pkg_name + ')', 'Module', module_file))
        with open(opj(docpath, pkg_file), "wb") as fo:
            data = {"pkg_name": pkg_name, "modules": links, "version": pkg_version}
            fo.write(pkgTemplate(data))
        cur.execute('INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)',
                    (pkg_name, 'Package', pkg_file))
        print("DONE!")
def find_procedure(self):
    """Return the procedure steps matching this asset's domain, or None.

    The procedures table is fetched (and cached for a day) as JSON keyed by
    domain substrings; the first key contained in this URL's netloc wins,
    and its first entry is skipped.
    """
    import urlparse

    def get_procedures():
        return util.gzip_request(util.procedures_endpoint())

    domain = urlparse.urlparse(self.url)[1]  # netloc component
    raw = cache.fetch('procedures.json', get_procedures, expires=cache.TIME_DAY)
    procedures = self.environment.str_to_json(raw)
    for proc_domain in procedures:
        if proc_domain in domain:
            # Equivalent to the original `del proc[0]; return proc` on the
            # freshly parsed local structure: drop the first entry.
            return procedures[proc_domain][1:]
    return None
def debug_module(pkg_name, module_name):
    """Render one module's docs to ./assets/debug.html for quick inspection."""
    search_results = requests.get("http://package.elm-lang.org/search.json").json()
    ordered = sorted(search_results, key=lambda a: a["name"].lower())
    packages = {p["name"]: p for p in ordered}
    pkg = packages[pkg_name]
    docs_url = "/".join([
        "http://package.elm-lang.org/packages",
        pkg_name,
        pkg["version"],
        "docs.json",
    ])
    modules = {m["name"]: m for m in fetch(docs_url)}
    module = Module(modules[module_name], pkg_name)
    markdown = toHtml(module.markdown).replace('<code>', '<code class="elm">')
    with open("./assets/debug.html", "w") as out:
        out.write(moduleTemplate({
            "pkg_link": (pkg_name, "#"),
            "module_name": module.name,
            "markdown": markdown,
        }))
def gitRM(this, name):
    """Fetch a repo's master-branch README from GitHub and render it to HTML."""
    url = "/".join([gitHub, name, "raw/master", "README.md"])
    # Second argument False presumably alters fetch()'s parsing/caching
    # behavior -- confirm against fetch's definition.
    readme = fetch(url, False)
    return toHtml(readme)
def generate_all():
    """Fetch every current Elm package and generate all docset pages.

    Writes index.html, one page per package and one per module under
    ``docpath``, and populates the ``searchIndex`` table through the
    module-level cursor ``cur``.  Packages absent from new-packages
    (deprecated ones) are skipped.
    """
    global pkgs
    print("feching all packages list ...")
    all_pkgs = fetch(pkgsURL + "all-packages")
    print("DONE!")
    print("feching new packages list ...")
    new_pkgs = fetch(pkgsURL + "new-packages")
    print("DONE!")
    new_pkgs = set(new_pkgs)  # de-duplicate; O(1) membership tests below

    # Keep only packages still listed as current, sorted by name.
    # (Bug fix: the old "deprecated" list compared (name, data) tuples from
    # items() against plain name strings, so it was always the full dict --
    # and it was never used. Removed.)
    pkgs = [p for p in all_pkgs if p["name"] in new_pkgs]
    pkgs.sort(key=lambda a: a["name"].lower())

    # generate the index
    with open(opj(docpath, "index.html"), "wb") as fo:
        fo.write(
            indexTemplate({
                "pkgs": [(pkg["name"], docname(pkg["name"]), pkg["summary"])
                         for pkg in pkgs]
            }))

    no_pkgs = len(pkgs)
    for idx, pkg in enumerate(pkgs, 1):  # enumerate avoids O(n^2) pkgs.index()
        pkg_name = pkg["name"]
        pkg_file = docname(pkg_name)
        pkg_version = pkg["versions"][0]
        print("Generating package: " + pkg_name + " [% 3d / %03d]..." % (idx, no_pkgs), end="")
        docURL = pkgsURL + "/".join(
            ["packages", pkg_name, pkg_version, "documentation"]) + ".json"
        json = fetch(docURL)
        links = []
        for module_json in json:
            moduleJsonURL = pkgsURL + "/".join([
                "packages", pkg_name, pkg_version, "docs",
                module_json["name"].replace(".", "-")
            ]) + ".json"
            module = Module(fetch(moduleJsonURL), pkg_name)
            module_file = docname(pkg_name, module.name)
            links.append((module.name, module_file))
            with open(opj(docpath, module_file), "wb") as fo:
                # fix syntax detection for the highlighter
                html = toHtml(module.markdown).replace(
                    '<code>', '<code class="elm">')
                data = {
                    "pkg_link": (pkg_name, pkg_file),
                    "module_name": module.name,
                    "markdown": html
                }
                fo.write(moduleTemplate(data))
            cur.execute(
                'INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)',
                (module.name, 'Module', module_file))
        with open(opj(docpath, pkg_file), "wb") as fo:
            data = {
                "pkg_name": pkg_name,
                "modules": links,
                "version": pkg_version
            }
            fo.write(pkgTemplate(data))
        cur.execute(
            'INSERT OR IGNORE INTO searchIndex(name, type, path) VALUES (?,?,?)',
            (pkg_name, 'Package', pkg_file))
        print("DONE!")
def get_bus_locations(stop_number):
    # Return the cached BusLocationList for this stop, if one is present.
    # NOTE(review): no cache-miss path is visible here -- on a miss the
    # function implicitly returns None. Confirm whether the fetch-and-cache
    # branch was truncated from this snapshot.
    if cached_bus_locations_list := cache.fetch(stop_number, BusLocationList):
        print('RETURN FROM CACHE')
        return cached_bus_locations_list