def crawl_delete():
    """Delete the crawl identified by the ``id`` query parameter.

    Flashes a status message and redirects back to the crawl listing on
    both the success and the error path.
    """
    delegate = Delegate()
    try:
        # NOTE: with ``type=int`` a missing or non-numeric id yields None
        # instead of raising, so the except-ValueError below would never
        # fire for that case — guard explicitly.
        crawl_id = request.args.get('id', type=int)
        if crawl_id is None:
            flash('No crawl id.')
            return redirect(url_for('crawl'))
        crawl = delegate.crawl_get_by_id(crawl_id)
        delegate.crawl_delete(crawl)
        flash('Crawl deleted')
        return redirect(url_for('crawl'))
    except ValueError:
        # presumably raised by crawl_get_by_id for an unknown id — TODO confirm
        flash('No crawl id.')
        return redirect(url_for('crawl'))
def crawl_report():
    """Return the progress report for the crawl given by ``id`` as JSON.

    On a ValueError (bad/unknown id) flashes an error and redirects to
    the crawl listing instead.
    """
    global PROGRESS_TRACKER
    delegate = Delegate()
    crawl_id = request.args.get('id')
    try:
        # The fetch is kept for its side effect: it presumably raises
        # ValueError for an unknown id (handled below) — TODO confirm.
        delegate.crawl_get_by_id(crawl_id)
        # Live progress comes from the in-memory tracker, not the DB row.
        progress = PROGRESS_TRACKER.get_progress(crawl_id)
        return jsonify(progress)
    except ValueError:
        flash('No crawl id.')
        return redirect(url_for('crawl'))
def crawl_view_links():
    """Render the links page for the crawl given by the ``id`` parameter.

    Loads the crawl, its URLs, the current session user and all sites,
    then renders ``crawl_view_links.html``; on a missing/invalid id it
    flashes an error and redirects to the crawl listing.
    """
    delegate = Delegate()
    try:
        # ``type=int`` returns None (does not raise) for a missing or
        # non-numeric id, so guard explicitly instead of relying on the
        # except-ValueError below.
        crawl_id = request.args.get('id', type=int)
        if crawl_id is None:
            flash('No crawl id.')
            return redirect(url_for('crawl'))
        crawl = delegate.crawl_get_by_id(crawl_id)
        links = delegate.url_get_all_by_crawl_id(crawl_id)
        user = delegate.user_get_by_id(session['user_id'])
        # TODO: In the future show only sites for current user
        sites = delegate.site_get_all()
        return render_template('crawl_view_links.html', crawl=crawl,
                               links=links, user=user, sites=sites)
    except ValueError:
        # presumably raised by the delegate lookups for an unknown id — TODO confirm
        flash('No crawl id.')
        return redirect(url_for('crawl'))