def static_html(name):
    """Render the jinja template for page *name* and return the cleaned HTML.

    Known aliases redirect to their canonical page first. Admin mode is
    active only when the adminmode cookie is "true" AND the request passes
    auth. Aborts with 404 when the template raises ValueError (nonexistent
    entity).
    """
    if name in aliases: redirect(aliases[name])
    # NOTE(review): linkheaders is built but never used in this function —
    # presumably meant for a Link response header; confirm intent.
    linkheaders = ["</style.css>; rel=preload; as=style"]
    keys = remove_identical(FormsDict.decode(request.query))

    adminmode = request.cookies.get("adminmode") == "true" and auth.check(request)

    clock = Clock()
    clock.start()

    LOCAL_CONTEXT = {
        "adminmode": adminmode,
        "apikey": request.cookies.get("apikey") if adminmode else None,
        "_urikeys": keys,  #temporary!
    }
    lc = LOCAL_CONTEXT
    # split the raw URI keys into their semantic groups for the template
    lc["filterkeys"], lc["limitkeys"], lc["delimitkeys"], lc["amountkeys"], lc["specialkeys"] = uri_to_internal(keys)

    template = jinja_environment.get_template(name + '.jinja')
    try:
        res = template.render(**LOCAL_CONTEXT)
    except ValueError as e:
        # template render failed for this entity — surface as 404
        abort(404, "Entity does not exist")

    # in dev mode, drop the template cache so edits show up on the next request
    if settings.get_settings("DEV_MODE"): jinja_environment.cache.clear()

    log("Generated page {name} in {time:.5f}s".format(name=name, time=clock.stop()), module="debug_performance")
    return clean_html(res)
def trackInfo_external():
    """API endpoint: resolve the track named in the query and return its info."""
    query = FormsDict.decode(request.query)
    # only the filter keys matter here; force interpretation as a track
    filterkeys = uri_to_internal(query, forceTrack=True)[0]
    return trackInfo(**{**filterkeys})
def get_performance_external():
    """API endpoint: chart performance over time for the filtered entity."""
    query = FormsDict.decode(request.query)
    filterkeys, timekeys, internalkeys, amountkeys = uri_to_internal(query)
    combined = {**filterkeys, **timekeys, **internalkeys, **amountkeys}
    return {"list": get_performance(**combined)}
def get_charts_artists_external():
    """API endpoint: artist charts for the requested time range."""
    query = FormsDict.decode(request.query)
    # only the time keys are relevant for artist charts
    timekeys = uri_to_internal(query)[1]
    return {"list": get_charts_artists(**timekeys)}
def get_charts_tracks_external():
    """API endpoint: track charts, optionally restricted to a single artist."""
    query = FormsDict.decode(request.query)
    filterkeys, timekeys, _, _ = uri_to_internal(query, forceArtist=True)
    combined = {**filterkeys, **timekeys}
    return {"list": get_charts_tracks(**combined)}
def get_scrobbles_num_external():
    """API endpoint: number of scrobbles matching filter, time and amount keys."""
    query = FormsDict.decode(request.query)
    filterkeys, timekeys, _, amountkeys = uri_to_internal(query)
    combined = {**filterkeys, **timekeys, **amountkeys}
    return {"amount": get_scrobbles_num(**combined)}
def post_scrobble():
    """Scrobble endpoint (POST form data): clean the submitted artist/title,
    store the scrobble and sync the database to disk.

    Returns a success dict with the stored track, or an empty body with
    HTTP 403 on a bad API key. A missing/malformed "time" field falls back
    to the current UTC timestamp.
    """
    keys = FormsDict.decode(request.forms)  # The Dal★Shabet handler
    artists = keys.get("artist")
    title = keys.get("title")
    apikey = keys.get("key")
    if not checkAPIkey(apikey):
        response.status = 403
        return ""
    try:
        time = int(keys.get("time"))
    except (TypeError, ValueError):
        # bare except replaced: int() raises TypeError for a missing field
        # (None) and ValueError for a non-numeric one — nothing else should
        # be swallowed here
        time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
    (artists, title) = cla.fullclean(artists, title)
    ## this is necessary for localhost testing
    #response.set_header("Access-Control-Allow-Origin","*")
    trackdict = createScrobble(artists, title, time)
    #if (time - lastsync) > 3600:
    #	sync()
    sync()  #always sync, one filesystem access every three minutes shouldn't matter
    return {"status": "success", "track": trackdict}
def get_top_artists_external():
    """API endpoint: top artist per time slice."""
    query = FormsDict.decode(request.query)
    _, timekeys, internalkeys, _ = uri_to_internal(query)
    combined = {**timekeys, **internalkeys}
    return {"list": get_top_artists(**combined)}
def newrule():
    """Append a user-made rule entry to rules/webmade.tsv (API-key protected)
    and mark the rule state as dirty."""
    form = FormsDict.decode(request.forms)
    apikey = form.pop("key", None)
    if checkAPIkey(apikey):
        # NOTE(review): this stores the remaining field *names* in order, not
        # their values — looks odd, but kept exactly as the original behaved;
        # verify against the submitting form.
        tsv.add_entry("rules/webmade.tsv", list(form))
        #addEntry("rules/webmade.tsv",[k for k in keys])
        global db_rulestate
        db_rulestate = False
def test_decode_method(self):
    """Decoded FormsDict keys and values are native strings (have .encode)."""
    raw = tob('äöü')
    forms = FormsDict(py2=raw, py3=raw.decode('latin1'))
    forms = forms.decode()
    self.assertFalse(forms.recode_unicode)
    first_key = list(forms.keys())[0]
    first_value = list(forms.values())[0]
    self.assertTrue(hasattr(first_key, 'encode'))
    self.assertTrue(hasattr(first_value, 'encode'))
def test_decode_method(self):
    """FormsDict.attribute returns u'' on UnicodeError.

    Variant that asserts the concrete ``unicode`` type of decoded
    keys/values rather than duck-typing via ``hasattr(..., 'encode')``.
    """
    data = tob('äöü')
    d = FormsDict(py2=data, py3=data.decode('latin1'))
    d = d.decode()
    self.assertFalse(d.recode_unicode)
    # NOTE(review): `unicode` is a Python-2-only builtin — on Python 3 these
    # assertions raise NameError unless the test module aliases unicode=str.
    self.assertEqual(unicode, type(list(d.keys())[0]))
    self.assertEqual(unicode, type(list(d.values())[0]))
def get_top_tracks_external():
    """API endpoint: top track per time slice."""
    query = FormsDict.decode(request.query)
    _, timekeys, internalkeys, _ = uri_to_internal(query)
    combined = {**timekeys, **internalkeys}
    # IMPLEMENT THIS FOR TOP TRACKS OF ARTIST AS WELL?
    return {"list": get_top_tracks(**combined)}
def route(self, fullpath):
    """Generic API dispatcher: collect parameters from query string and body,
    authenticate, and hand the split path to the handler.

    A bottle ``Response`` from the handler is returned unchanged; any other
    result is serialized and wrapped in this API's output format. Failed
    auth yields HTTP 403.
    """
    # preprocess all requests
    headers = request.headers
    keys = FormsDict.decode(request.query)
    if self.debug:
        log("Request to " + fullpath)
        for k in keys:
            log("\t" + k + " = " + keys.get(k))
    # JSON bodies are merged into the parameter dict; otherwise form fields are
    if request.get_header("Content-Type") is not None and "application/json" in request.get_header("Content-Type"):
        json = request.json if request.json is not None else {}
        keys.update(json)
    else:
        formdict = FormsDict.decode(request.forms)
        for k in formdict:
            for v in formdict.getall(k):
                # last value wins for repeated form fields
                keys[k] = v
        #keys.update(FormsDict.decode(request.forms))
    #print(keys)
    nodes = fullpath.split("/")
    reqmethod = request.method
    if self.auth(request):
        result = self.handle(nodes, reqmethod, keys, headers)
        if isinstance(result, Response):
            return result
        else:
            result = serialize(result)
            result = format_output[self.type](result, root_node=self.rootnode)
            return result
    else:
        response.status = 403
        return "Access denied"
def sapi(path):
    """Compliant-API entry point: split *path* into segments, pick parameters
    from a JSON body or the combined request params, and dispatch."""
    segments = [part for part in path.split("/") if part]
    headers = request.headers
    content_type = request.get_header("Content-Type")
    if content_type is not None and "application/json" in content_type:
        keys = request.json
    else:
        keys = FormsDict.decode(request.params)
    auth = request.auth
    return compliant_api.handle(segments, keys, headers, auth)
def search():
    """Search artists and tracks for the query string, best matches first."""
    keys = FormsDict.decode(request.query)
    query = keys.get("query")
    max_ = keys.get("max")
    if max_ is not None:
        max_ = int(max_)
    query = query.lower()

    artists = db_search(query, type="ARTIST")
    tracks = db_search(query, type="TRACK")

    # if the string begins with the query it's a better match, if a word in it
    # begins with it, still good — also, shorter is better (because longer
    # titles would be easier to further specify)
    def match_grade(text):
        lowered = text.lower()
        if lowered.startswith(query):
            return 0
        if " " + query in lowered:
            return 1
        return 2

    artists.sort(key=lambda name: (match_grade(name), len(name)))
    tracks.sort(key=lambda track: (match_grade(track["title"]), len(track["title"])))
    return {"artists": artists[:max_], "tracks": tracks[:max_]}
def static_html(name):
    """Serve website/<name>.html with the shared header/footer injected and,
    when a matching website/<name>.py module exists, its key substitutions
    applied. Emits Link preload headers for the page's resources.
    """
    linkheaders = ["</css/maloja.css>; rel=preload; as=style"]
    keys = remove_identical(FormsDict.decode(request.query))

    with open("website/" + name + ".html") as htmlfile:
        html = htmlfile.read()

    # apply global substitutions
    with open("website/common/footer.html") as footerfile:
        footerhtml = footerfile.read()
    with open("website/common/header.html") as headerfile:
        headerhtml = headerfile.read()
    html = html.replace("</body>", footerhtml + "</body>").replace("</head>", headerhtml + "</head>")

    # If a python file exists, it provides the replacement dict for the html file
    if os.path.exists("website/" + name + ".py"):
        #txt_keys = SourceFileLoader(name,"website/" + name + ".py").load_module().replacedict(keys,DATABASE_PORT)
        try:
            content = SourceFileLoader(name, "website/" + name + ".py").load_module().instructions(keys)
            # a plain string result is a redirect target
            if isinstance(content, str): redirect(content)
            txt_keys, resources = content
        except HTTPResponse as e:
            # redirects etc. pass through untouched
            raise
        except Exception as e:
            log("Error in website generation: " + str(sys.exc_info()), module="error")
            raise

        # add headers for server push
        for resource in resources:
            if all(ord(c) < 128 for c in resource["file"]):
                # we can only put ascii stuff in the http header
                linkheaders.append("<" + resource["file"] + ">; rel=preload; as=" + resource["type"])

        # apply key substitutions
        for k in txt_keys:
            if isinstance(txt_keys[k], list):
                # if list, we replace each occurence with the next item
                for element in txt_keys[k]:
                    html = html.replace(k, element, 1)
            else:
                html = html.replace(k, txt_keys[k])

    response.set_header("Link", ",".join(linkheaders))
    return html
def rebuild():
    """Full database rebuild (API-key protected): sync, re-run the fix script,
    recreate the cleaning/collecting agents and rebuild from scratch."""
    form = FormsDict.decode(request.forms)
    apikey = form.pop("key", None)
    if checkAPIkey(apikey):
        log("Database rebuild initiated!")
        global db_rulestate
        db_rulestate = False
        sync()
        # NOTE(review): shells out to a helper script; assumes cwd is the data dir
        os.system("python3 fixexisting.py")
        global cla, coa
        cla = CleanerAgent()
        coa = CollectorAgent()
        build_db()
        invalidate_caches()
def import_rulemodule():
    """Activate or deactivate a predefined rule module (API-key protected).

    With "remove" present the rulefile is deleted; otherwise a symlink to the
    predefined rulefile is created under rules/.
    """
    form = FormsDict.decode(request.forms)
    apikey = form.pop("key", None)
    if checkAPIkey(apikey):
        filename = form.get("filename")
        remove = form.get("remove") is not None
        # whitelist filename characters so the name can't escape the rules dir
        validchars = "-_abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"
        filename = "".join(c for c in filename if c in validchars)
        if remove:
            log("Deactivating predefined rulefile " + filename)
            os.remove("rules/" + filename + ".tsv")
        else:
            log("Importing predefined rulefile " + filename)
            os.symlink("predefined/" + filename + ".tsv", "rules/" + filename + ".tsv")
def database_get(pth):
    """Proxy a GET request to the local database server and relay its body,
    status and content type. On an upstream HTTP error, mirror its status."""
    keys = FormsDict.decode(request.query)  # The Dal★Shabet handler
    # rebuild the query string with percent-encoded keys and values
    keystring = "?"
    for k in keys:
        keystring += urllib.parse.quote(k) + "=" + urllib.parse.quote(keys[k]) + "&"
    response.set_header("Access-Control-Allow-Origin", "*")
    try:
        target = "http://[::1]:" + str(DATABASE_PORT) + "/" + pth + keystring
        proxyresponse = urllib.request.urlopen(target)
        contents = proxyresponse.read()
        response.status = proxyresponse.getcode()
        response.content_type = "application/json"
        return contents
    except HTTPError as e:
        response.status = e.code
        return
def search():
    """Search artists and tracks, ranked by match quality, and decorate each
    result with its page link and image URL."""
    keys = FormsDict.decode(request.query)
    query = keys.get("query")
    max_ = keys.get("max")
    if max_ is not None:
        max_ = int(max_)
    query = query.lower()

    artists = db_search(query, type="ARTIST")
    tracks = db_search(query, type="TRACK")

    # if the string begins with the query it's a better match, if a word in it
    # begins with it, still good — also, shorter is better (because longer
    # titles would be easier to further specify)
    def grade(text):
        lowered = text.lower()
        if lowered.startswith(query):
            return 0
        if " " + query in lowered:
            return 1
        return 2

    artists.sort(key=lambda name: (grade(name), len(name)))
    tracks.sort(key=lambda track: (grade(track["title"]), len(track["title"])))

    # add links
    artists_result = []
    for name in artists:
        entry = {"name": name}
        entry["link"] = "/artist?" + compose_querystring(internal_to_uri({"artist": name}))
        entry["image"] = "/image?" + compose_querystring(internal_to_uri({"artist": name}))
        artists_result.append(entry)

    tracks_result = []
    for t in tracks:
        # the track dict is annotated in place (original behavior: "link" is
        # already present on t when the "image" querystring is composed)
        t["link"] = "/track?" + compose_querystring(internal_to_uri({"track": t}))
        t["image"] = "/image?" + compose_querystring(internal_to_uri({"track": t}))
        tracks_result.append(t)

    return {"artists": artists_result[:max_], "tracks": tracks_result[:max_]}
def pseudo_post_scrobble():
    """Scrobble endpoint (GET query parameters): clean the submitted
    artist/title, store the scrobble and sync to disk at most once per hour.

    Returns an empty body; HTTP 403 on a bad API key. A missing/malformed
    "time" parameter falls back to the current UTC timestamp.
    """
    keys = FormsDict.decode(request.query)  # The Dal★Shabet handler
    artists = keys.get("artist")
    title = keys.get("title")
    apikey = keys.get("key")
    if not checkAPIkey(apikey):
        response.status = 403
        return ""
    try:
        time = int(keys.get("time"))
    except (TypeError, ValueError):
        # bare except replaced: int() raises TypeError for a missing field
        # (None) and ValueError for a non-numeric one — nothing else should
        # be swallowed here
        time = int(datetime.datetime.now(tz=datetime.timezone.utc).timestamp())
    (artists, title) = cla.fullclean(artists, title)
    ## this is necessary for localhost testing
    response.set_header("Access-Control-Allow-Origin", "*")
    createScrobble(artists, title, time)
    # write through to disk at most once per hour
    if (time - lastsync) > 3600:
        sync()
    return ""
def static_html(name):
    """Serve page *name*, preferring jinja, then pyhp, then plain HTML,
    depending on which files exist on disk and the USE_JINJA / USE_PYHP
    settings.
    """
    linkheaders = ["</style.css>; rel=preload; as=style"]
    keys = remove_identical(FormsDict.decode(request.query))

    # which renderers are available for this page
    pyhp_file = os.path.exists(pthjoin(WEBFOLDER, "pyhp", name + ".pyhp"))
    html_file = os.path.exists(pthjoin(WEBFOLDER, name + ".html"))
    jinja_file = os.path.exists(pthjoin(WEBFOLDER, "jinja", name + ".jinja"))
    pyhp_pref = settings.get_settings("USE_PYHP")
    jinja_pref = settings.get_settings("USE_JINJA")

    # admin mode requires both the cookie flag and a valid API key
    adminmode = request.cookies.get("adminmode") == "true" and database.checkAPIkey(request.cookies.get("apikey")) is not False

    clock = Clock()
    clock.start()

    # if a jinja file exists, use this
    if (jinja_file and jinja_pref) or (jinja_file and not html_file and not pyhp_file):
        LOCAL_CONTEXT = {
            "adminmode": adminmode,
            "apikey": request.cookies.get("apikey") if adminmode else None,
            "_urikeys": keys,  #temporary!
        }
        LOCAL_CONTEXT["filterkeys"], LOCAL_CONTEXT["limitkeys"], LOCAL_CONTEXT["delimitkeys"], LOCAL_CONTEXT["amountkeys"] = uri_to_internal(keys)
        template = jinjaenv.get_template(name + '.jinja')
        res = template.render(**LOCAL_CONTEXT)
        log("Generated page {name} in {time:.5f}s (Jinja)".format(name=name, time=clock.stop()), module="debug")
        return res

    # if a pyhp file exists, use this
    elif (pyhp_file and pyhp_pref) or (pyhp_file and not html_file):
        #things we expose to the pyhp pages
        environ = {
            "adminmode": adminmode,
            "apikey": request.cookies.get("apikey") if adminmode else None,
            # maloja
            "db": database,
            "htmlmodules": htmlmodules,
            "htmlgenerators": htmlgenerators,
            "malojatime": malojatime,
            "utilities": utilities,
            "urihandler": urihandler,
            "settings": settings.get_settings,
            # external
            "urllib": urllib
        }
        # request
        environ["filterkeys"], environ["limitkeys"], environ["delimitkeys"], environ["amountkeys"] = uri_to_internal(keys)
        environ["_urikeys"] = keys  #temporary!
        #response.set_header("Content-Type","application/xhtml+xml")
        res = pyhpfile(pthjoin(WEBFOLDER, "pyhp", name + ".pyhp"), environ)
        log("Generated page {name} in {time:.5f}s (PYHP)".format(name=name, time=clock.stop()), module="debug")
        return res

    # if not, use the old way
    else:
        with open(pthjoin(WEBFOLDER, name + ".html")) as htmlfile:
            html = htmlfile.read()

        # apply global substitutions
        with open(pthjoin(WEBFOLDER, "common/footer.html")) as footerfile:
            footerhtml = footerfile.read()
        with open(pthjoin(WEBFOLDER, "common/header.html")) as headerfile:
            headerhtml = headerfile.read()
        html = html.replace("</body>", footerhtml + "</body>").replace("</head>", headerhtml + "</head>")

        # If a python file exists, it provides the replacement dict for the html file
        if os.path.exists(pthjoin(WEBFOLDER, name + ".py")):
            #txt_keys = SourceFileLoader(name,"web/" + name + ".py").load_module().replacedict(keys,DATABASE_PORT)
            try:
                module = importlib.import_module(".web." + name, package="maloja")
                txt_keys, resources = module.instructions(keys)
            except Exception as e:
                log("Error in website generation: " + str(sys.exc_info()), module="error")
                raise

            # add headers for server push
            for resource in resources:
                if all(ord(c) < 128 for c in resource["file"]):
                    # we can only put ascii stuff in the http header
                    linkheaders.append("<" + resource["file"] + ">; rel=preload; as=" + resource["type"])

            # apply key substitutions
            for k in txt_keys:
                if isinstance(txt_keys[k], list):
                    # if list, we replace each occurence with the next item
                    for element in txt_keys[k]:
                        html = html.replace(k, element, 1)
                else:
                    html = html.replace(k, txt_keys[k])

        response.set_header("Link", ",".join(linkheaders))
        log("Generated page {name} in {time:.5f}s (Python+HTML)".format(name=name, time=clock.stop()), module="debug")
        return html
def dynamic_image():
    """Resolve the image for the entity described in the query and issue a
    temporary (307) redirect to it; empty body when no image is found."""
    query = FormsDict.decode(request.query)
    filterkeys = uri_to_internal(query)[0]
    target = resolveImage(**filterkeys)
    if target == "":
        return ""
    redirect(target, 307)
def test_decode_method(self):
    """Decoded FormsDict keys and values are native strings (have .encode)."""
    raw = tob('瓶')
    forms = FormsDict(py2=raw, py3=raw.decode('latin1')).decode()
    self.assertFalse(forms.recode_unicode)
    first_key = list(forms.keys())[0]
    first_value = list(forms.values())[0]
    self.assertTrue(hasattr(first_key, 'encode'))
    self.assertTrue(hasattr(first_value, 'encode'))
def document(project):
    """Render the "document" template for *project*, validating every form
    against the decoded request query first.

    Fixes: the original used a list comprehension purely for its side
    effects and re-decoded request.query once per form; the query is now
    decoded once and validation runs in a plain loop.
    """
    forms = [Form(name, project.forms[name]) for name in project.order]
    query = FormsDict.decode(request.query)
    for form in forms:
        form.validate(query)
    return template("document", {'project': project, 'forms': forms})
def __init__(self, name, terms, request):
    """Store the given name, terms and request, and keep a decoded copy of
    the request's query-string parameters.

    name    -- identifier for this object (semantics defined by the caller)
    terms   -- term collection used elsewhere in the class — TODO confirm shape
    request -- the active bottle request object
    """
    self.name = name
    self.terms = terms
    self.request = request
    # decoded query parameters, for convenient repeated access
    self.query = FormsDict.decode(request.query)