Example #1
def update_prebuilt(tldat, tldb):
    lpx = "update_prebuilt Timeline " + str(tldat.get("dsId", "new") + " ")
    # make a reference dict out of whatever existing preb is available
    preb = tldat.get("preb")
    if not preb and tldb:
        preb = tldb.get("preb")
    preb = preb or "[]"
    preb = util.load_json_or_default(preb, [])
    ptd = {k["dsId"]: k for k in preb}
    # update any ptd entries that were modified since last timeline save
    if tldb and tldat.get("cids"):
        logging.info(lpx + "fetching points updated since last timeline save")
        where = ("WHERE modified > \"" + tldb["modified"] + "\"" +
                 "AND dsId IN (" + tldat["cids"] + ")")
        points = dbacc.query_entity("Point", where)
        for point in points:
            ptd[point["dsId"]] = point_preb_summary(point)
    # rebuild preb, fetching points for any missing ptd entries
    logging.info(lpx + "rebuilding preb")
    preb = []
    for pid in util.csv_to_list(tldat.get("cids", "")):
        summary = ptd.get(pid)  # dict or None
        if not summary:
            point = dbacc.cfbk("Point", "dsId", pid)
            if point:
                summary = point_preb_summary(point)
        if not summary:
            logging.info(lpx + "No point info for pid " + pid)
        else:  # have summary info
            preb.append(summary)
    tldat["preb"] = json.dumps(preb)
Example #2
def collect_edited_points():
    where = ("WHERE modified >= \"" + rst["start"] + "\"" +
             " AND modified < \"" + rst["end"] + "\"")
    for pt in dbacc.query_entity("Point", where):
        uid = pt["lmuid"]
        verify_active_user(uid)
        ptedits = rst["dets"]["users"][uid]["ptedits"]
        ptedits[pt["dsId"]] = {"date": pt["date"], "text": pt["text"][0:60]}
Example #3
def convert_refs():
    """ Walk the database for unconverted instances and convert each. """
    for entity, fcs in refcons.items():
        where = "WHERE batchconv IS NULL OR batchconv != \"importconv\""
        cobs = dbacc.query_entity(entity, where)
        for cob in cobs:
            cob = convert_ref_fields(cob, fcs)
            cob["batchconv"] = "importconv"
            dbacc.write_entity(cob, vck=cob["modified"])
Example #4
def write_daysum_details():
    where = ("WHERE rtype = \"daysum\" AND tstamp = \"" + rst["start"] + "\"" +
             " ORDER BY created LIMIT 1")
    dcs = dbacc.query_entity("DayCount", where)
    if len(dcs) > 0:
        dc = dcs[0]
    else:
        dc = {"dsType": "DayCount", "rtype": "daysum", "tstamp": rst["start"]}
    dc["detail"] = json.dumps(rst["dets"])
    dbacc.write_entity(dc, dc.get("modified"))
Example #5
def get_db_object(fob):
    src = fob["source"]
    objs = dbacc.query_entity("Point", "WHERE source=\"" + src + "\" LIMIT 2")
    if len(objs) > 1:
        logging.warning("Multiple points for source " + src)
        return None
    if len(objs) < 1:
        logging.warning("Point source: " + src + ", importid: " +
                        fob["importid"] + " not found.")
        return None
    return objs[0]
Example #6
def featured():
    """ Return currently featured timelines to select from. """
    try:
        where = []
        for badval in ["Unlisted", "Archived", "Deleted"]:
            where.append("featured != \"" + badval + "\"")
        where = "WHERE " + " AND ".join(where)
        tls = dbacc.query_entity("Timeline", where)
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON(tls)
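For reference, the loop and join above produce a single clause of the form WHERE featured != "Unlisted" AND featured != "Archived" AND featured != "Deleted". A quick way to print it for inspection:

parts = ["featured != \"" + v + "\"" for v in ["Unlisted", "Archived", "Deleted"]]
print("WHERE " + " AND ".join(parts))  # matches the clause built in featured()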
Example #7
def findcomps():
    """ Return completions from other people for the given timeline. """
    try:
        appuser, _ = util.authenticate()
        tlid = dbacc.reqarg("tlid", "dbid", required=True)
        where = ("WHERE tlid = " + tlid + " AND userid != " + appuser["dsId"] +
                 " ORDER BY modified DESC LIMIT 50")
        tlcs = dbacc.query_entity("TLComp", where)
    except ValueError as e:
        return util.serve_value_error(e)
    return util.respJSON(tlcs)
Example #8
def roll_up_day_counts():
    where = ("WHERE tstamp >= \"" + rst["start"] + "\"" + " AND tstamp < \"" +
             rst["end"] + "\"")
    for dc in dbacc.query_entity("DayCount", where):
        det = json.loads(dc.get("detail", "{}"))
        if dc["rtype"] == "tlfetch":
            bump_count("refers", det.get("referer"))
            bump_count("agents", det.get("useragent"))
            bump_timeline(det.get("tlid"), det.get("tlname"), "fetch",
                          det.get("uid", "guest"))
        elif dc["rtype"] == "guestsave":
            bump_count("agents", det.get("useragent"))
            bump_timeline(det.get("tlid"), det.get("tlname"), "save", "guest")
Example #9
def verify_unique_timeline_field(tldat, field, tldb):
    # logging.info("vutf tldat: " + str(tldat))
    if field == "cname" and not tldat[field]:
        raise ValueError("A unique name is required.")
    if field == "slug" and not tldat.get(field):
        return True  # slug is optional.  May be missing from tldat.
    if tldb and (tldat[field] == tldb[field]):
        return True  # hasn't changed, so still unique
    where = "WHERE " + field + "=\"" + tldat[field] + "\" LIMIT 1"
    objs = dbacc.query_entity("Timeline", where)
    if len(objs) > 0:
        if field == "cname":
            field = "name"
        raise ValueError("There is already a timeline with that " + field)
    return True
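A hedged sketch of a call site: the save path presumably checks each constrained field before writing, letting the ValueError propagate to the caller's error handler. The wrapper name below is hypothetical:

def check_timeline_uniqueness(tldat, tldb):
    # Raises ValueError if either constrained field collides with an
    # existing Timeline; slug is skipped inside the check when absent.
    for field in ["cname", "slug"]:
        verify_unique_timeline_field(tldat, field, tldb)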
Example #10
def collect_active_user_stats():
    where = ("WHERE modified >= \"" + rst["start"] + "\"" +
             " AND modified < \"" + rst["end"] + "\"")
    for user in dbacc.query_entity("AppUser", where):
        verify_active_user(user["dsId"], user)
        started = safe_load_json_array(user, "started")
        for tpi in started:
            if within_time_window(tpi.get("latestsave")):
                # must have fetched timeline so tlname already set up
                bump_timeline(tpi["tlid"], "", "save", user["dsId"])
        completed = safe_load_json_array(user, "completed")
        for tci in completed:
            if within_time_window(tci.get("latest")):
                bump_timeline(tci["tlid"], tci["name"], "comp", user["dsId"])
        built = safe_load_json_array(user, "built")
        for tmi in built:
            if within_time_window(tmi.get("modified")):
                bump_timeline(tmi["tlid"], tmi["name"], "edit", user["dsId"])