def edit_page(name, author, body, note, history=True):
    """Write to a page.

    Creates the page if it doesn't already exist. With history=True the
    new revision is prepended to the revision list; otherwise it replaces
    the most recent revision in place. Returns True.
    """
    name = name.strip("/")  # Remove any surrounding slashes.

    # Load the existing page, or initialize a fresh one.
    page = get_page(name)
    if not page:
        page = dict(revisions=[])

    # Build the new revision record.
    rev = dict(
        id=hashlib.md5(str(int(time.time())).encode("utf-8")).hexdigest(),
        time=int(time.time()),
        author=author,
        body=body,
        note=note or "Updated the page.",
    )

    if history:
        # Keep full history: newest revision goes first.
        page["revisions"].insert(0, rev)
    elif page["revisions"]:
        # Overwrite the newest revision instead of stacking a new one.
        page["revisions"][0] = rev
    else:
        # No revisions yet; this becomes the first.
        page["revisions"].append(rev)

    logger.info("Write to Wiki page {}".format(name))
    JsonDB.commit("wiki/pages/{}".format(name), page)
    return True
def edit_page(name, author, body, note, history=True):
    """Write to a page.

    Initializes the page when missing; prepends a revision when
    history is kept, otherwise replaces the latest one. Returns True.
    """
    name = name.strip("/")  # Normalize: no leading/trailing slashes.

    # Fetch the current page document (falsy when it doesn't exist yet).
    page = get_page(name) or {"revisions": []}

    # The revision being recorded now.
    revision = {
        "id": hashlib.md5(str(int(time.time())).encode("utf-8")).hexdigest(),
        "time": int(time.time()),
        "author": author,
        "body": body,
        "note": note or "Updated the page.",
    }

    revisions = page["revisions"]
    if history:
        revisions.insert(0, revision)
    else:
        # Replace the newest entry; append when the list is empty.
        if len(revisions) == 0:
            revisions.append(revision)
        else:
            revisions[0] = revision

    logger.info("Write to Wiki page {}".format(name))
    JsonDB.commit("wiki/pages/{}".format(name), page)
    return True
def delete_history(name, revision):
    """Delete a revision entry from the history.

    Returns None if the page doesn't exist, delegates to delete_page()
    when the last revision is removed, and True otherwise.
    """
    name = name.strip("/")

    page = get_page(name)
    if not page:
        return None

    # Keep every revision except the one being deleted.
    kept = []
    for rev in page["revisions"]:
        if rev["id"] == revision:
            logger.info("Delete history ID {} from Wiki page {}".format(
                revision, name))
        else:
            kept.append(rev)

    if not kept:
        # Empty history = delete the page.
        logger.info(
            "Deleted last history item; Remove Wiki page {}".format(name))
        return delete_page(name)

    page["revisions"] = kept
    JsonDB.commit("wiki/pages/{}".format(name), page)
    return True
def delete_history(name, revision):
    """Delete a revision entry from the history.

    None when the page is missing; deletes the page itself when its
    last revision is removed.
    """
    name = name.strip("/")

    page = get_page(name)
    if not page:
        return None

    # Rebuild the revision list, skipping the matching ID.
    remaining = list()
    for entry in page["revisions"]:
        if entry["id"] != revision:
            remaining.append(entry)
            continue
        logger.info("Delete history ID {} from Wiki page {}".format(revision, name))

    # With no revisions left, the page itself goes away.
    if len(remaining) == 0:
        logger.info("Deleted last history item; Remove Wiki page {}".format(name))
        return delete_page(name)

    page["revisions"] = remaining
    JsonDB.commit("wiki/pages/{}".format(name), page)
    return True
def rebuild_index():
    """Rebuild the index.json if it goes missing.

    Walks every blog entry document, folds it into a fresh index via
    update_index (without per-post commits), writes the result once,
    and returns the new index dict.
    """
    index = dict()
    for post_id in JsonDB.list_docs("blog/entries"):
        post = JsonDB.get("blog/entries/{}".format(post_id))
        # commit=False: we write the whole index once at the end.
        update_index(post_id, post, index, False)
    JsonDB.commit("blog/index", index)
    return index
def create(username, password, name=None, uid=None, role="user"):
    """Create a new user account.

    Returns the user ID number assigned to this user."""
    # Display name defaults to the username.
    if name is None:
        name = username
    username = username.lower()

    # A requested UID is honored only when nobody has it already.
    if uid is not None and exists(uid=uid):
        logger.warning("Wanted to use UID {} for user {} but it wasn't available.".format(uid, username))
        uid = None
    if uid is None:
        uid = get_next_uid()
    uid = int(uid)

    # Username musn't exist.
    if exists(username):
        # The front-end shouldn't let this happen.
        raise Exception("Can't create username {}: already exists!".format(username))

    # Crypt their password.
    hashedpass = hash_password(password)
    logger.info("Create user {} with username {}".format(uid, username))

    # The user document itself.
    account = dict(
        uid=uid,
        username=username,
        name=name,
        picture="",
        role=role,
        password=hashedpass,
        created=time.time(),
    )
    JsonDB.commit("users/by-id/{}".format(uid), account)

    # And the username-to-ID mapping.
    JsonDB.commit("users/by-name/{}".format(username), dict(uid=uid))

    return uid
def delete_entry(post_id):
    """Remove a blog entry.

    Deletes the entry document and removes it from the index cache.
    Logs a warning and does nothing when the post doesn't exist."""
    # Fetch the blog information.
    index = get_index()
    post = get_entry(post_id)
    if post is None:
        logger.warning("Can't delete post {}, it doesn't exist!".format(post_id))
        # Bug fix: bail out here. The original fell through and tried to
        # delete the document and `del` a missing index key (KeyError).
        return

    # Delete the post.
    JsonDB.delete("blog/entries/{}".format(post_id))

    # Update the index cache.
    del index[str(post_id)]  # Python JSON dict keys must be strings, never ints
    JsonDB.commit("blog/index", index)
def convert_index():
    """One-off migration: convert the old blog index to the new format.

    Fix: this was Python 2 code (`print` statement, dict.iteritems),
    which is a SyntaxError under Python 3 — ported to Python 3 syntax.
    """
    print("Converting blog index")
    index = json_get("blog/index/1.json")
    new = {}
    for post_id, data in index.items():
        # The ID is the key now; drop the redundant field.
        del data["id"]

        # Enforce data types.
        data["author"] = int(data["author"])
        data["time"] = int(data["time"])
        data["sticky"] = bool(data["sticky"])

        new[post_id] = data
    JsonDB.commit("blog/index", new)
def convert_subscriptions():
    """One-off migration: move comment subscriptions to the new layout.

    Fix: ported from Python 2 (`print` statements) to Python 3 syntax.
    """
    print("Converting subscriptions...")
    for name in glob.glob(os.path.join(siikir, "subscribers/1/*.json")):
        name = name.split("/")[-1]
        if name.startswith("photos-"):
            continue
        data = json_get("subscribers/1/{}".format(name))
        thread = name[:len(name)-5]  # Strip the ".json" suffix.

        # Enforce data types.
        for email in data:
            data[email] = int(data[email])

        print("*", thread)
        JsonDB.commit("comments/subscribers/{}".format(thread), data)
def delete_entry(post_id):
    """Remove a blog entry.

    Deletes the entry document and drops it from the index cache.
    A nonexistent post is logged and left alone."""
    # Fetch the blog information.
    index = get_index()
    post = get_entry(post_id)
    if post is None:
        logger.warning(
            "Can't delete post {}, it doesn't exist!".format(post_id))
        # Bug fix: return early — without this, the code below deletes a
        # nonexistent document and raises KeyError on the index `del`.
        return

    # Delete the post.
    JsonDB.delete("blog/entries/{}".format(post_id))

    # Update the index cache.
    del index[str(
        post_id)]  # Python JSON dict keys must be strings, never ints
    JsonDB.commit("blog/index", index)
def update_user(uid, data):
    """Update the user's data.

    Merges `data` into the stored user document. A non-empty, changed
    "username" also moves the username-to-ID mapping.
    """
    if not exists(uid=uid):
        raise Exception("Can't update user {}: doesn't exist!".format(uid))
    db = get_user(uid=uid)

    # Renaming? Move the username->ID mapping along with it.
    new_name = data.get("username")
    if new_name and new_name != db["username"]:
        JsonDB.delete("users/by-name/{}".format(db["username"]))
        JsonDB.commit("users/by-name/{}".format(new_name), dict(uid=int(uid)))

    db.update(data)
    JsonDB.commit("users/by-id/{}".format(uid), db)
def convert_subscriptions():
    """One-off migration: move comment subscriptions to the new layout.

    Fix: this was Python 2 code (`print` statements), which does not
    parse under Python 3 — ported to Python 3 syntax.
    """
    print("Converting subscriptions...")
    for name in glob.glob(os.path.join(siikir, "subscribers/1/*.json")):
        name = name.split("/")[-1]
        if name.startswith("photos-"):
            continue
        data = json_get("subscribers/1/{}".format(name))
        thread = name[:len(name) - 5]  # Strip the ".json" suffix.

        # Enforce data types.
        for email in data:
            data[email] = int(data[email])

        print("*", thread)
        JsonDB.commit("comments/subscribers/{}".format(thread), data)
def legacy_download():
    """Legacy download-counter endpoint.

    Accepts GET or POST; sanitizes the project/file names, counts hits
    per (project, file) pair, and renders the download page. Falls back
    to the index with a flash message when the file isn't found."""
    form = None
    if request.method == "POST":
        form = request.form
    else:
        # CNET links to the MS-DOS download using semicolon delimiters in the
        # query string. Fix that if detected.
        query = request.query_string.decode()
        if not '&' in query and ';' in query:
            url = re.sub(r';|%3b', '&', request.url, flags=re.IGNORECASE)
            return redirect(url)
        form = request.args

    method = form.get("method", "index")
    project = form.get("project", "")
    filename = form.get("file", "")
    root = "/home/kirsle/www/projects"

    if project and filename:
        # Filter the sections.
        project = re.sub(r'[^A-Za-z0-9]', '', project)  # Project name is alphanumeric only.
        filename = re.sub(r'[^A-Za-z0-9\-_\.]', '', filename)

        # Check that all the files exist.
        if os.path.isdir(os.path.join(root, project)) and os.path.isfile(os.path.join(root, project, filename)):
            # Hit counters.
            hits = {"hits": 0}
            db = "data/downloads/{}-{}".format(project, filename)
            # Fix: `db` is already fully formatted; the old
            # `db.format(project, filename)` was a confusing no-op.
            if JsonDB.exists(db):
                hits = JsonDB.get(db)

            # Actually getting the file?
            if method == "get":
                # Up the hit counter.
                hits["hits"] += 1
                JsonDB.commit(db, hits)

            g.info["method"] = method
            g.info["project"] = project
            g.info["file"] = filename
            g.info["hits"] = hits["hits"]
            return template("download.html")

    flash("The file or project wasn't found.")
    return redirect(url_for("index"))
def convert_comments():
    """One-off migration: move comment threads to the new layout.

    Fix: ported from Python 2 (`print` statements) to Python 3 syntax.
    """
    print("Converting comments...")
    for name in glob.glob(os.path.join(siikir, "comments/1/*.json")):
        name = name.split("/")[-1]
        if name.startswith("photos-"):
            continue
        data = json_get("comments/1/{}".format(name))
        thread = name[:len(name)-5]  # Strip the ".json" suffix.

        # Enforce data types.
        for cid in data:
            data[cid]["time"] = int(data[cid]["time"])
            data[cid]["uid"] = int(data[cid]["uid"])

        print("*", thread)
        JsonDB.commit("comments/threads/{}".format(thread), data)
def convert_comments():
    """One-off migration: move comment threads to the new layout.

    Fix: this was Python 2 code (`print` statements), a SyntaxError
    under Python 3 — ported to Python 3 syntax.
    """
    print("Converting comments...")
    for name in glob.glob(os.path.join(siikir, "comments/1/*.json")):
        name = name.split("/")[-1]
        if name.startswith("photos-"):
            continue
        data = json_get("comments/1/{}".format(name))
        thread = name[:len(name) - 5]  # Strip the ".json" suffix.

        # Enforce data types.
        for cid in data:
            data[cid]["time"] = int(data[cid]["time"])
            data[cid]["uid"] = int(data[cid]["uid"])

        print("*", thread)
        JsonDB.commit("comments/threads/{}".format(thread), data)
def convert_posts():
    """One-off migration: move old blog entries to the new layout.

    Fix: ported from Python 2 (`print` statements) to Python 3 syntax.
    """
    print("Converting blog entries...")
    for name in glob.glob(os.path.join(siikir, "blog/entries/1/*.json")):
        name = name.split("/")[-1]
        post = json_get("blog/entries/1/{}".format(name))
        post_id = post["id"]
        del post["id"]  # ID lives in the document name now.

        # Enforce data types.
        post["time"] = int(post["time"])
        post["author"] = int(post["author"])
        post["comments"] = bool(post["comments"])
        post["sticky"] = bool(post["sticky"])
        post["emoticons"] = bool(post["emoticons"])

        print("*", post["subject"])
        JsonDB.commit("blog/entries/{}".format(post_id), post)
def legacy_download():
    """Legacy download-counter endpoint.

    Accepts GET or POST; sanitizes the project/file names, counts hits
    per (project, file) pair, and renders the download page. Falls back
    to the index with a flash message when the file isn't found."""
    form = None
    if request.method == "POST":
        form = request.form
    else:
        form = request.args

    method = form.get("method", "index")
    project = form.get("project", "")
    filename = form.get("file", "")
    root = "/home/kirsle/www/projects"

    if project and filename:
        # Filter the sections.
        project = re.sub(r'[^A-Za-z0-9]', '', project)  # Project name is alphanumeric only.
        filename = re.sub(r'[^A-Za-z0-9\-_\.]', '', filename)

        # Check that all the files exist.
        if os.path.isdir(os.path.join(root, project)) and os.path.isfile(
                os.path.join(root, project, filename)):
            # Hit counters.
            hits = {"hits": 0}
            db = "data/downloads/{}-{}".format(project, filename)
            # Fix: `db` is already fully formatted; the old
            # `db.format(project, filename)` was a confusing no-op.
            if JsonDB.exists(db):
                hits = JsonDB.get(db)

            # Actually getting the file?
            if method == "get":
                # Up the hit counter.
                hits["hits"] += 1
                JsonDB.commit(db, hits)

            g.info["method"] = method
            g.info["project"] = project
            g.info["file"] = filename
            g.info["hits"] = hits["hits"]
            return template("download.html")

    flash("The file or project wasn't found.")
    return redirect(url_for("index"))
def rebuild_visitor_stats():
    """Recalculate the total unique/hits based on daily info.

    Reads every per-day traffic document (skipping the "total" docs),
    merges unique visitors and sums hit counts, then writes both totals
    back out.
    """
    unique_visitors = {}
    hit_count = 0

    # Tally them all up!
    for date in JsonDB.list_docs("traffic/unique"):
        if date != "total":
            day = JsonDB.get("traffic/unique/{}".format(date), cache=False)
            unique_visitors.update(day)
    for date in JsonDB.list_docs("traffic/hits"):
        if date != "total":
            day = JsonDB.get("traffic/hits/{}".format(date), cache=False)
            hit_count += day.get("hits", 0)

    # Write the outputs.
    JsonDB.commit("traffic/unique/total", unique_visitors)
    JsonDB.commit("traffic/hits/total", dict(hits=hit_count))
def legacy_download():
    """Legacy download-counter endpoint.

    Accepts GET or POST; sanitizes project/file names, keeps a hit
    counter per (project, file) pair, and renders the download page.
    Unknown files flash an error and redirect to the index."""
    form = None
    if request.method == "POST":
        form = request.form
    else:
        form = request.args

    method = form.get("method", "index")
    project = form.get("project", "")
    filename = form.get("file", "")
    root = "/home/kirsle/www/projects"

    if project and filename:
        # Filter the sections.
        project = re.sub(r'[^A-Za-z0-9]', '', project)  # Project name is alphanumeric only.
        filename = re.sub(r'[^A-Za-z0-9\-_\.]', '', filename)

        # Check that all the files exist.
        if os.path.isdir(os.path.join(root, project)) and os.path.isfile(os.path.join(root, project, filename)):
            # Hit counters.
            hits = {"hits": 0}
            db = "data/downloads/{}-{}".format(project, filename)
            # Fix: `db` is already fully formatted; the old
            # `db.format(project, filename)` was a confusing no-op.
            if JsonDB.exists(db):
                hits = JsonDB.get(db)

            # Actually getting the file?
            if method == "get":
                # Up the hit counter.
                hits["hits"] += 1
                JsonDB.commit(db, hits)

            g.info["method"] = method
            g.info["project"] = project
            g.info["file"] = filename
            g.info["hits"] = hits["hits"]
            return template("download.html")

    flash("The file or project wasn't found.")
    return redirect(url_for("index"))
def update_index(post_id, post, index=None, commit=True):
    """Update a post's meta-data in the index.

    This is also used for adding a new post to the index for the first time.

    * post_id: The ID number for the post
    * post: The DB object for a blog post
    * index: If you already have the index open, you can pass it here
    * commit: Write the DB after updating the index (default True)"""
    if index is None:
        index = get_index()

    # Only the searchable/listable metadata goes into the index.
    entry = {
        "fid": post["fid"],
        "time": post["time"] or int(time.time()),
        "categories": post["categories"],
        "sticky": False,  # TODO
        "author": post["author"],
        "privacy": post["privacy"] or "public",
        "subject": post["subject"],
    }
    index[post_id] = entry

    if commit:
        JsonDB.commit("blog/index", index)
def log_referrer(request, link):
    """Double check the referring URL.

    Fetches `link` and logs it to traffic/referrers when the page really
    references our hostname. Returns the link when logged, else None.

    Fixes: the bare `except: pass` (which silently swallowed *every*
    error, including bugs in the JsonDB calls) is narrowed to the
    network errors that are actually expected here; the redundant second
    `server_name()` call is removed."""
    hostname = server_name()

    # Ignore if same domain.
    if link.startswith("http://{}".format(hostname)) or \
       link.startswith("https://{}".format(hostname)):
        return None

    # See if the URL really links back to us.
    try:
        r = requests.get(
            link,
            timeout=5,
            verify=False,  # Don't do SSL verification
        )
    except requests.RequestException:
        # Unreachable/malformed referrer: nothing to log.
        return None

    # Make sure the request didn't just redirect back to our main site
    # (e.g. http://whatever.example.com wildcard may redirect back to
    # http://example.com, and if that's us, don't log that!)
    if r.url.startswith("http://{}".format(hostname)) or \
       r.url.startswith("https://{}".format(hostname)):
        return None

    # Look for our hostname in their page.
    if hostname in r.text:
        # Log it.
        db = list()
        if JsonDB.exists("traffic/referrers"):
            # Don't cache the result -- the list can get huge!
            db = JsonDB.get("traffic/referrers", cache=False)
        db.append(link)
        JsonDB.commit("traffic/referrers", db, cache=False)
        return link

    return None
def update_index(post_id, post, index=None, commit=True):
    """Update a post's meta-data in the index.

    This is also used for adding a new post to the index for the first time.

    * post_id: The ID number for the post
    * post: The DB object for a blog post
    * index: If you already have the index open, you can pass it here
    * commit: Write the DB after updating the index (default True)"""
    if index is None:
        index = get_index()

    # Store only the listing metadata, not the post body.
    index[post_id] = dict(
        fid=post["fid"],
        time=post["time"] or int(time.time()),
        categories=post["categories"],
        sticky=False,  # TODO
        author=post["author"],
        privacy=post["privacy"] or "public",
        subject=post["subject"],
    )

    if commit:
        JsonDB.commit("blog/index", index)
def write_comments(thread, comments):
    """Save the comments DB.

    An empty comment dict deletes the thread document outright instead
    of storing an empty file.
    """
    if not comments:
        return JsonDB.delete("comments/threads/{}".format(thread))
    return JsonDB.commit("comments/threads/{}".format(thread), comments)
def write_index(index):
    """Save the index back to the DB.

    Thin wrapper: commits the given photo index dict to the
    "photos/index" document and returns JsonDB.commit's result.
    """
    return JsonDB.commit("photos/index", index)
def post_entry(post_id, fid, epoch, author, subject, avatar, categories,
               privacy, ip, emoticons, comments, format, body):
    """Post (or update) a blog entry.

    Allocates an ID for new posts, derives a unique "friendly ID" slug
    from the subject when none is given, writes the entry document, and
    refreshes the index cache. Returns (post_id, fid).

    Fix: the slug-collision loop used `unicode(loop)`, a Python 2
    builtin that raises NameError under Python 3 — replaced with str."""
    # Fetch the index.
    index = get_index()

    # Editing an existing post?
    if not post_id:
        post_id = get_next_id(index)
    logger.debug("Posting blog post ID {}".format(post_id))

    # Get a unique friendly ID.
    if not fid:
        # The default friendly ID = the subject, slugified.
        fid = subject.lower()
        fid = re.sub(r'[^A-Za-z0-9]', '-', fid)
        fid = re.sub(r'\-+', '-', fid)
        fid = fid.strip("-")
        logger.debug("Chosen friendly ID: {}".format(fid))

    # Make sure the friendly ID is unique!
    if len(fid):
        test = fid
        loop = 1
        logger.debug("Verifying the friendly ID is unique: {}".format(fid))
        while True:
            collision = False
            for k, v in index.items():
                # Skip the same post, for updates.
                if k == post_id:
                    continue
                if v["fid"] == test:
                    # Not unique: append an increasing counter and retry.
                    loop += 1
                    test = fid + "_" + str(loop)  # was: unicode(loop) (Py2-only)
                    collision = True
                    logger.debug("Collision with existing post {}: {}".format(k, v["fid"]))
                    break

            # Was there a collision?
            if collision:
                continue  # Try again.
            break
        fid = test

    # DB body for the post.
    db = dict(
        fid=fid,
        ip=ip,
        time=epoch or int(time.time()),
        categories=categories,
        sticky=False,  # TODO: implement sticky
        comments=comments,
        emoticons=emoticons,
        avatar=avatar,
        privacy=privacy or "public",
        author=author,
        subject=subject,
        format=format,
        body=body,
    )

    # Write the post.
    JsonDB.commit("blog/entries/{}".format(post_id), db)

    # Update the index cache.
    update_index(post_id, db, index)

    return post_id, fid
def write_subscribers(thread, subs):
    """Save the subscribers to the DB.

    An empty subscriber dict removes the document instead of committing
    an empty one.
    """
    if not subs:
        return JsonDB.delete("comments/subscribers/{}".format(thread))
    return JsonDB.commit("comments/subscribers/{}".format(thread), subs)
def track_visit(request, session): """Main logic to track and log visitor details.""" # Get their tracking cookie value. The value will either be their HTTP # referrer (if exists and valid) or else a "1". cookie = session.get("tracking") addr = remote_addr() values = dict() # Returnable traffic values # Log hit counts. We need four kinds: # - Unique today - Unique total # - Hits today - Hits total today = pretty_time("%Y-%m-%d", time.time()) files = { "unique/{}".format(today): "unique_today", "unique/total": "unique_total", "hits/{}".format(today): "hits_today", "hits/total": "hits_total", } # Go through the hit count files. Update them only if their tracking # cookie was not present. for file, key in files.items(): dbfile = "traffic/{}".format(file) if file.startswith("hits"): # Hit file is just a simple counter. db = dict(hits=0) if JsonDB.exists(dbfile): db = JsonDB.get(dbfile) if db is None: db = dict(hits=0) # Update it? if not cookie: db["hits"] += 1 JsonDB.commit(dbfile, db) # Store the copy. values[key] = db["hits"] else: # Unique file is a collection of IP addresses. db = dict() if JsonDB.exists(dbfile): db = JsonDB.get(dbfile) if db is None: db = dict() # Update with their IP? if not cookie and not addr in db: db[addr] = time.time() JsonDB.commit(dbfile, db) # Store the copy. values[key] = len(db.keys()) # Log their HTTP referrer. referrer = "1" if request.referrer: # Branch and check this. referrer = log_referrer(request, request.referrer) if not referrer: # Wasn't a valid referrer. referrer = "1" # Set their tracking cookie. if not cookie: cookie = referrer session["tracking"] = cookie return values
def post_entry(post_id, fid, epoch, author, subject, avatar, categories,
               privacy, ip, emoticons, comments, format, body):
    """Post (or update) a blog entry.

    Allocates an ID for new posts, derives a unique "friendly ID" slug
    from the subject when none is given, writes the entry document, and
    refreshes the index cache. Returns (post_id, fid).

    Fix: the slug-collision loop used `unicode(loop)`, a Python 2
    builtin that raises NameError under Python 3 — replaced with str."""
    # Fetch the index.
    index = get_index()

    # Editing an existing post?
    if not post_id:
        post_id = get_next_id(index)
    logger.debug("Posting blog post ID {}".format(post_id))

    # Get a unique friendly ID.
    if not fid:
        # The default friendly ID = the subject, slugified.
        fid = subject.lower()
        fid = re.sub(r'[^A-Za-z0-9]', '-', fid)
        fid = re.sub(r'\-+', '-', fid)
        fid = fid.strip("-")
        logger.debug("Chosen friendly ID: {}".format(fid))

    # Make sure the friendly ID is unique!
    if len(fid):
        test = fid
        loop = 1
        logger.debug("Verifying the friendly ID is unique: {}".format(fid))
        while True:
            collision = False
            for k, v in index.items():
                # Skip the same post, for updates.
                if k == post_id:
                    continue
                if v["fid"] == test:
                    # Not unique: append an increasing counter and retry.
                    loop += 1
                    test = fid + "_" + str(loop)  # was: unicode(loop) (Py2-only)
                    collision = True
                    logger.debug("Collision with existing post {}: {}".format(
                        k, v["fid"]))
                    break

            # Was there a collision?
            if collision:
                continue  # Try again.
            break
        fid = test

    # DB body for the post.
    db = dict(
        fid=fid,
        ip=ip,
        time=epoch or int(time.time()),
        categories=categories,
        sticky=False,  # TODO: implement sticky
        comments=comments,
        emoticons=emoticons,
        avatar=avatar,
        privacy=privacy or "public",
        author=author,
        subject=subject,
        format=format,
        body=body,
    )

    # Write the post.
    JsonDB.commit("blog/entries/{}".format(post_id), db)

    # Update the index cache.
    update_index(post_id, db, index)

    return post_id, fid