Example #1
def cli(debug, cache, incremental):
    """Crawler framework for documents and structured scrapers."""
    settings.HTTP_CACHE = cache
    settings.INCREMENTAL = incremental
    settings.DEBUG = debug
    if settings.DEBUG:
        logging.basicConfig(level=logging.DEBUG)
    init_memorious()
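As shown above, cli() sets the global settings and initialises the framework, but the excerpt omits the imports and the command registration. The memorious CLI is built on click; the sketch below shows one way the function could be wired up end to end. The option names, defaults and the __main__ guard are illustrative assumptions, not copied from the memorious source.

import logging

import click

from memorious.core import settings, init_memorious


# Hypothetical click wiring: option names and defaults are assumptions
# for illustration only.
@click.group()
@click.option("--debug/--no-debug", default=False)
@click.option("--cache/--no-cache", default=True)
@click.option("--incremental/--no-incremental", default=True)
def cli(debug, cache, incremental):
    """Crawler framework for documents and structured scrapers."""
    settings.HTTP_CACHE = cache
    settings.INCREMENTAL = incremental
    settings.DEBUG = debug
    if settings.DEBUG:
        logging.basicConfig(level=logging.DEBUG)
    init_memorious()


if __name__ == "__main__":
    cli()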
Example #2
import math
from urllib.parse import urlencode
import logging
from flask import Flask, request, redirect, jsonify
from flask import render_template, abort, url_for
from babel.numbers import format_number
from babel.dates import format_date, format_datetime

from memorious.core import settings, manager, init_memorious
from memorious.model import Event, Crawl

PAGE_SIZE = 50  # page size for listing views
app = Flask(__name__)
init_memorious()
# Log verbosely in debug mode, otherwise at INFO level.
if settings.DEBUG:
    logging.basicConfig(level=logging.DEBUG)
else:
    logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)


@app.template_filter("number")
def number_filter(s, default=""):
    if s is None or s == 0 or not len(str(s)):
        return default
    return format_number(s, locale="en_GB")


@app.template_filter("datetime")
def datetime_filter(s):
    if s is None or s == 0 or not len(str(s)):