def crawl_archive():
    """Walk the story archive day by day and record every story link.

    Relies on module-level ``start_date``, ``end_date`` and ``oneday``
    (presumably date bounds and a one-day step — defined elsewhere in the
    file; TODO confirm). Each day's crawl duration is logged at INFO level.
    """
    for day_link in archive_links(start_date, end_date, oneday):
        # day_link looks like a (url, date-label) pair — verify against
        # archive_links(); index 0 is fetched, index 1 is stored/logged.
        day_start = time.time()
        for link in get_link_from_day_arhive(day_link[0]):
            db.add_story({'date': day_link[1], 'url': link})
        # Lazy %-style logging args: no string formatting cost when INFO
        # is disabled (was an eager str.format call).
        logging.info("%s: %s", day_link[1], time.time() - day_start)
def add():
    """Flask view: render the add-story form (GET) or create a story (POST).

    Requires a logged-in user (``session['username']``); anonymous
    visitors are redirected to the login page.
    """
    # `not in` is the idiomatic membership test (was `not 'username' in session`).
    if 'username' not in session:
        return redirect(url_for('login'))
    if request.method == 'GET':
        return render_template('addPage.html', username=session['username'])
    # POST: strip the submitted title once and reuse it — the original
    # re-read and re-stripped the form field for the redirect.
    title = request.form['data'].strip()
    db.add_story(session['username'], title)
    return redirect('story/' + title)
def home():
    """Flask view: list story titles (GET) or dispatch on the pressed button (POST).

    POST buttons: "Create!" adds a story, "Read!" opens one, "Drop Story"
    removes one. Every non-"Read!" path redirects back to home.
    """
    db.auth()
    if request.method == 'GET':
        return render_template("home.html", titles=db.getTitles())

    button = request.form["button"]
    if button == "Create!":
        # Form values are already str (the original wrapped in str());
        # plain truthiness replaces the `len(...) > 0` check.
        newname = request.form["newtitle"]
        if newname:
            db.add_story(newname)
    elif button == 'Read!':
        selected = request.form.get("otitle", "")
        return redirect(url_for('story', s=selected))  # story.html TBC
    elif button == 'Drop Story':
        db.remove_story(request.form.get("drop", ""))
    # Create!, Drop Story and any unknown button fall through here.
    # Dropped: the original's unused `titles = db.getTitles()` re-fetches
    # (results discarded before redirecting) and an unreachable trailing return.
    return redirect(url_for('home'))