def fetchtl():
    """ Return the requested timeline and note the fetch. """
    try:
        tlid = dbacc.reqarg("tlid", "dbid")
        if tlid:
            tl = dbacc.cfbk("Timeline", "dsId", str(tlid), required=True)
        else:
            slug = dbacc.reqarg("slug", "string")
            if not slug:
                slug = "default"
            slug = slug.lower()  # in case someone camelcased the url.
            tl = dbacc.cfbk("Timeline", "slug", slug, required=True)
        tls = contained_timelines(tl)
        # Note the timeline was fetched for daily stats tracking
        det = {"referer": flask.request.headers.get("Referer", ""),
               "useragent": flask.request.headers.get("User-Agent", ""),
               "tlid": tl["dsId"],
               "tlname": tl["name"],
               "uid": dbacc.reqarg("uid", "dbid")}
        dcd = {"dsType": "DayCount", "tstamp": dbacc.timestamp(),
               "rtype": "tlfetch", "detail": json.dumps(det)}
        dbacc.write_entity(dcd)
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON(tls)
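# Hedged sketch, not part of the app: a reporting script could decode the
# "tlfetch" DayCount records written above. The detail field names match
# fetchtl; summarize_tlfetch and its dcs argument are hypothetical.
def summarize_tlfetch(dcs):
    """ Count fetches per timeline from a list of tlfetch DayCount dicts. """
    counts = {}
    for dc in dcs:
        det = json.loads(dc["detail"])
        counts[det["tlid"]] = counts.get(det["tlid"], 0) + 1
    return counts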
def set_point_srctls(tlid, ptidcsv):
    """ Mark each listed Point as sourced from the given timeline. """
    if ptidcsv:
        ptids = ptidcsv.split(",")
        for ptid in ptids:
            point = dbacc.cfbk("Point", "dsId", ptid)
            if point:
                point["srctl"] = tlid
                dbacc.write_entity(point, point["modified"])
def convert_refs():
    """ Walk the database for unconverted instances and convert each. """
    for entity, fcs in refcons.items():
        where = "WHERE batchconv IS NULL OR batchconv != \"importconv\""
        cobs = dbacc.query_entity(entity, where)
        for cob in cobs:
            cob = convert_ref_fields(cob, fcs)
            cob["batchconv"] = "importconv"
            dbacc.write_entity(cob, vck=cob["modified"])
def write_daysum_details():
    """ Write or update the daysum DayCount for the reporting period. """
    where = ("WHERE rtype = \"daysum\" AND tstamp = \"" + rst["start"] + "\"" +
             " ORDER BY created LIMIT 1")
    dcs = dbacc.query_entity("DayCount", where)
    if len(dcs) > 0:
        dc = dcs[0]
    else:
        dc = {"dsType": "DayCount", "rtype": "daysum", "tstamp": rst["start"]}
    dc["detail"] = json.dumps(rst["dets"])
    dbacc.write_entity(dc, dc.get("modified"))
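# Hedged sketch of the read side, assuming the same dbacc query interface used
# above: fetch the most recently written daysum detail for display.
# latest_daysum is hypothetical and not part of the source.
def latest_daysum():
    """ Return the most recent daysum detail dict, or None if none exist. """
    where = "WHERE rtype = \"daysum\" ORDER BY tstamp DESC LIMIT 1"
    dcs = dbacc.query_entity("DayCount", where)
    if dcs:
        return json.loads(dcs[0]["detail"])
    return None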
def verify_db_instance(fbase, fields, fob):
    """ Write the imported instance if not already in the database. """
    dbob = dbacc.cfbk(fob["dsType"], "importid", fob["importid"])
    if not dbob:
        for fieldname, fattrs in fields.items():
            if fattrs["pt"] == "image" and fob[fieldname]:
                imgfilename = fbase + "images/" + fob["importid"] + ".png"
                with open(imgfilename, 'rb') as imagefile:
                    bdat = imagefile.read()
                fob[fieldname] = base64.b64encode(bdat)
        logging.info("Writing " + fob["dsType"] + fob["importid"])
        dbacc.write_entity(fob)
def newacct():
    """ Create a new account from the given email and password. """
    try:
        emaddr = dbacc.reqarg("email", "AppUser.email", required=True)
        emaddr = normalize_email(emaddr)
        verify_new_email_valid(emaddr)
        pwd = dbacc.reqarg("password", "string", required=True)
        cretime = dbacc.nowISO()
        appuser = {"dsType": "AppUser", "created": cretime,
                   "email": "placeholder", "phash": "whatever",
                   "accessed": cretime + ";1",
                   "completed": "[]", "started": "[]", "built": "[]"}
        update_email_and_password(appuser, emaddr, pwd)
        update_account_fields(appuser)
        appuser = dbacc.write_entity(appuser)
        dbacc.entcache.cache_put(appuser)  # will need this again shortly
        token = token_for_user(appuser)
    except ValueError as e:
        return serve_value_error(e)
    return respJSON([appuser, token], audience="private")
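# Hedged sketch only: update_email_and_password is defined elsewhere in this
# module, and the app's real hashing scheme is not shown here. make_phash is a
# hypothetical illustration of deriving a salted hash (PBKDF2 from the standard
# library) suitable for a field like phash.
import hashlib
import os

def make_phash(password):
    """ Return a salted password hash as "salthex:digesthex". """
    salt = os.urandom(16)
    digest = hashlib.pbkdf2_hmac("sha256", password.encode("utf8"), salt, 100000)
    return salt.hex() + ":" + digest.hex()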
def upldpic():
    """ Form submit and monitoring for uploading a point pic. """
    # flask.request.method always returns "GET", so check for file input.
    picfile = flask.request.files.get("picfilein")
    if not picfile:
        logging.info("upldpic ready for upload")
        return util.respond("Ready", mimetype="text/plain")
    try:
        appuser, _ = util.authenticate()
        ptid = dbacc.reqarg("ptid", "dbid", required=True)
        pt = dbacc.cfbk("Point", "dsId", ptid, required=True)
        logging.info(appuser["email"] + " upldpic Point " + str(ptid))
        if appuser["dsId"] not in util.csv_to_list(pt["editors"]):
            raise ValueError("Not authorized to edit this point")
        img = Image.open(picfile)
        img = ImageOps.exif_transpose(img)  # correct vertical orientation
        sizemaxdims = 400, 400  # max allowed width/height for thumbnail resize
        img.thumbnail(sizemaxdims)  # modify, preserving aspect ratio
        bbuf = io.BytesIO()  # file-like object for save
        img.save(bbuf, format="PNG")
        pt["pic"] = base64.b64encode(bbuf.getvalue())
        pt = dbacc.write_entity(pt, pt["modified"])
    except ValueError as e:
        logging.info("upldpic Point " + str(ptid) + ": " + str(e))
        return util.serve_value_error(e)
    return util.respond("Done: " + pt["modified"], mimetype="text/plain")
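# Standalone sketch of the image handling above, runnable outside Flask for
# testing. thumbnail_b64, its path argument, and the 400px default are
# assumptions for illustration; only the Pillow calls mirror upldpic.
from PIL import Image, ImageOps
import base64
import io

def thumbnail_b64(path, maxdim=400):
    """ Return a base64-encoded PNG thumbnail of the image file at path. """
    img = Image.open(path)
    img = ImageOps.exif_transpose(img)  # honor EXIF rotation
    img.thumbnail((maxdim, maxdim))     # in place, preserving aspect ratio
    buf = io.BytesIO()
    img.save(buf, format="PNG")
    return base64.b64encode(buf.getvalue())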
def updpt():
    """ Standard app POST call to update a Point. """
    try:
        appuser, _ = util.authenticate()
        fields = ["dsId", "dsType", "modified", "editors", "srctl", "source",
                  "date", "text", "refs", "qtype", "communities", "regions",
                  "categories", "tags", "srclang", "translations", "stats"]
        ptdat = util.set_fields_from_reqargs(fields, {})
        dbpt = verify_edit_authorization(appuser, ptdat)
        if dbpt:
            dbst = dbpt.get("srctl")
            if dbst and (dbst != ptdat.get("srctl")):
                raise ValueError("Source Timeline may not be changed.")
            util.fill_missing_fields(fields, dbpt, ptdat)
        else:  # making a new instance
            for fld in ["srctl", "date", "text"]:
                if not ptdat.get(fld):  # required point field value
                    raise ValueError("Point " + fld + " value is required.")
            # date format validity checking is done client side
        remove_html_from_point_fields(ptdat)
        ptdat["lmuid"] = appuser["dsId"]
        pt = dbacc.write_entity(ptdat, ptdat.get("modified", ""))
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON(pt)
def updtl():
    """ Standard app POST call to update a Timeline. """
    try:
        appuser, _ = util.authenticate()
        tlfs = ["dsId", "dsType", "modified", "editors", "name", "slug",
                "title", "subtitle", "featured", "lang", "comment", "about",
                "kwds", "ctype", "cids", "rempts", "svs"]
        tldat = util.set_fields_from_reqargs(tlfs, {})
        # logging.info("updtl received: " + json.dumps(tldat))
        tldb = verify_edit_authorization(appuser, tldat)
        if tldb:
            util.fill_missing_fields(tlfs, tldb, tldat)
            util.set_fields_from_reqargs(tlfs, tldat)  # for fields set to ""
        tldat["cname"] = canonize(tldat.get("name", ""))
        verify_unique_timeline_field(tldat, "cname", tldb)
        verify_unique_timeline_field(tldat, "slug", tldb)
        if tldat.get("featured") == "Promoted":
            if not tldb or (tldb.get("featured") != "Promoted"):
                raise ValueError("Promoted feature not authorized")
        update_prebuilt(tldat, tldb)
        tldat["lmuid"] = appuser["dsId"]
        tl = dbacc.write_entity(tldat, tldat.get("modified", ""))
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON(tl)
def notecomp():
    """ Note Timeline completion in TLComp instance. """
    try:
        appuser, token = util.authenticate()
        tlc = {"dsType": "TLComp", "userid": appuser["dsId"],
               "username": appuser["name"]}
        tlc = util.set_fields_from_reqargs(
            ["tlid", "tlname", "tltitle", "tlsubtitle"], tlc)
        proginst = pop_proginst_from_started(appuser, tlc["tlid"])
        tlc["data"] = json.dumps(proginst)
        tlc = dbacc.write_entity(tlc)
        push_or_update_completion(appuser, tlc, proginst)
        appuser = dbacc.write_entity(appuser, appuser["modified"])
        dbacc.entcache.cache_put(appuser)  # ensure cache has latest
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON([appuser, token], audience="private")
def verify_db_instance(fr, fob):
    """ Backfill importid, codes, and pic on the matching db instance. """
    dbob = get_db_object(fob)
    if not dbob:
        return
    updated = ""
    if not dbob["importid"]:
        dbob["importid"] = fob["importid"]
        updated += " importid:" + fob["importid"]
    if fob["codes"] and not dbob["codes"]:
        dbob["codes"] = fob["codes"]
        updated += " codes:" + fob["codes"]
    if fob["pic"] and not dbob["pic"]:
        imgfilename = fr + "images/" + fob["importid"] + ".png"
        with open(imgfilename, 'rb') as imagefile:
            bdat = imagefile.read()
        dbob["pic"] = base64.b64encode(bdat)
        updated += " pic"
    if updated:
        global converted
        converted += 1
        logging.info("Updated Point " + dbob["dsId"] + updated)
        dbacc.write_entity(dbob, vck=dbob["modified"])
def notefs():
    """ Note first save of Timeline progress. """
    # normally called to make note of an anonymous user having reached the
    # first save point in a timeline. Shows someone interacted with the
    # timeline even if they don't create an account.
    try:
        det = {"useragent": flask.request.headers.get("User-Agent", ""),
               "tlid": dbacc.reqarg("tlid", "dbid"),
               "tlname": dbacc.reqarg("tlname", "string"),
               "uid": dbacc.reqarg("uid", "dbid")}
        dcd = {"dsType": "DayCount", "tstamp": dbacc.timestamp(),
               "rtype": "guestsave", "detail": json.dumps(det)}
        dbacc.write_entity(dcd)
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON("[]")
def updacc():
    """ Update the authenticated user's account settings. """
    try:
        appuser, token = authenticate()
        chg = update_email_and_password(
            appuser,
            dbacc.reqarg("updemail", "AppUser.email"),
            dbacc.reqarg("updpassword", "string"))
        if chg != "nochange":
            logging.info("Changing " + chg + " for " + appuser["email"])
        update_account_fields(appuser)
        update_accessed_count(appuser)
        actcode = dbacc.reqarg("actcode", "string")
        if actcode:
            logging.info(appuser["email"] + " actcode: " + actcode)
            if actcode == appuser["actcode"]:
                appuser["status"] = "Active"
            else:
                logging.info("actcode did not match: " + appuser["actcode"])
        appuser = dbacc.write_entity(appuser, appuser["modified"])
        dbacc.entcache.cache_put(appuser)  # ensure cache has latest
        token = token_for_user(appuser)  # return possibly updated token
    except ValueError as e:
        return serve_value_error(e)
    return respJSON([appuser, token], audience="private")
def get_connection_service(svcname):
    """ Return the named AppService, creating a placeholder if needed. """
    cs = dbacc.cfbk("AppService", "name", svcname)
    if not cs:  # create needed placeholder for administrators to update
        cs = dbacc.write_entity({"dsType": "AppService", "name": svcname})
    return cs
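# Usage sketch ("emailsvc" is a hypothetical service name): callers fetch the
# record up front, and an administrator fills in credentials on the placeholder
# later. Only dsType and name are guaranteed on a freshly created record.
# svc = get_connection_service("emailsvc")
# logging.info("Using AppService " + svc["name"])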