Code Example #1
File: app.py  Project: zheolls/TechNews
def get_news_manager():
    page = int(request.args.get('page', 0))
    limit = int(request.args.get('limit', 10))
    # Apply pagination via LIMIT/OFFSET so the `page` parameter is actually used;
    # cursor.execute returns a row count, not rows, so only fetchall() is assigned to news.
    sql = 'select articleid,title,url,imgurl,source,date from article LIMIT %s OFFSET %s'
    cursor.execute(sql, (limit, page * limit))
    news = cursor.fetchall()

    # import json
    # from bson import json_util
    # docs_list = list(news)
    # return json.dumps(docs_list, default=json_util.default)

    data = []
    for n in news:
        item = {
            'id': str(n[0]),
            'title': n[1],
            'url': n[2],
            'image': n[3],
            'source': n[4],
            'created_at': datetime_format(n[5]),
        }

        data.append(item)
    return jsonify(data)
Code Example #2
File: app.py  Project: zheolls/TechNews
def index():
    page = int(request.args.get('page', 0))
    limit = int(request.args.get('limit', 100))

    # Apply pagination via LIMIT/OFFSET so the `page` parameter is actually used.
    sql = 'select articleid,title,url,imgurl,source,date from article LIMIT %s OFFSET %s'
    cursor.execute(sql, (limit, page * limit))
    news = cursor.fetchall()
    # import json
    # from bson import json_util
    # docs_list = list(news)
    # return json.dumps(docs_list, default=json_util.default)

    entries = []
    for n in news:
        item = {
            'id': str(n[0]),
            'title': n[1],
            'url': n[2],
            'image': n[3],
            'source': n[4],
            'created_at': datetime_format(n[5]),
        }

        entries.append(item)
        # random.shuffle(entries)

    return render_template('show_entries.html', entries=entries)
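
Examples #1 and #2 assume a Flask application object, a module-level MySQL cursor, and a datetime_format helper defined elsewhere in app.py. A minimal sketch of that assumed context, with hypothetical connection details and a hypothetical formatting rule, might look like this:

# Sketch of the assumed Flask/MySQL context (not part of the original project).
from datetime import datetime

import pymysql  # assumption: any DB-API driver exposing execute/fetchall would do
from flask import Flask, request, jsonify, render_template

app = Flask(__name__)
conn = pymysql.connect(host='localhost', user='root', password='', db='technews')  # hypothetical credentials
cursor = conn.cursor()


def datetime_format(value):
    # Hypothetical helper: format datetime values as 'YYYY-MM-DD HH:MM', pass strings through.
    if isinstance(value, datetime):
        return value.strftime('%Y-%m-%d %H:%M')
    return str(value)
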
Code Example #3
File: app.py  Project: wy17910/FutureNews
def index():
    page = int(request.args.get('page', 0))
    limit = int(request.args.get('limit', 100))

    news = db.news.find().sort("crawled_at", -1).skip(page * limit).limit(limit)

    # import json
    # from bson import json_util
    # docs_list = list(news)
    # return json.dumps(docs_list, default=json_util.default)

    entries = []
    for n in news:
        item = {
            'id': str(n['_id']),
            'title': n['title'],
            'url': n['url'],
            'image': n['img_url'],
            'category': n['category'],
            'source': n['source'],
            'created_at': datetime_format(n['crawled_at']),
        }

        entries.append(item)
        # random.shuffle(entries)

    return render_template('show_entries.html', entries=entries)
Code Example #4
File: app.py  Project: wy17910/FutureNews
def get_news():
    page = int(request.args.get('page', 0))
    limit = int(request.args.get('limit', 10))

    news = db.news.find().sort("crawled_at", -1).skip(page * limit).limit(limit)

    # import json
    # from bson import json_util
    # docs_list = list(news)
    # return json.dumps(docs_list, default=json_util.default)

    data = []
    for n in news:
        item = {
            'id': str(n['_id']),
            'title': n['title'],
            'url': n['url'],
            'image': n['img_url'],
            'category': n['category'],
            'source': n['source'],
            'created_at': datetime_format(n['crawled_at']),
        }

        data.append(item)
    return jsonify(data)
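
Examples #3 and #4 (and likewise #5 and #6 below) assume a pymongo database handle named db and Flask route wiring defined elsewhere in app.py. A minimal sketch of that assumed context, with a hypothetical database name and hypothetical URL rules, might be:

# Sketch of the assumed MongoDB/Flask context (not part of the original projects).
from flask import Flask
from pymongo import MongoClient

app = Flask(__name__)
client = MongoClient('mongodb://localhost:27017/')  # hypothetical connection string
db = client['news_db']  # hypothetical database name; the views only require db.news

# The original route decorators are not shown in the snippets; hypothetical wiring could be:
# app.add_url_rule('/', view_func=index)
# app.add_url_rule('/api/news', view_func=get_news)
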
Code Example #5
File: app.py  Project: dog1203/BlockChainews_ajax
def index():
    page = int(request.args.get('page', 0))
    limit = int(request.args.get('limit', 100))

    news = db.news.find().sort("crawled_at", -1).skip(page * limit).limit(limit)

    # import json
    # from bson import json_util
    # docs_list = list(news)
    # return json.dumps(docs_list, default=json_util.default)

    entries = []
    for n in news:
        item = {
            'id': str(n['_id']),
            'title': n['title'],
            'url': n['url'],
            'image': n['img_url'],
            'category': n['category'],
            'source': n['source'],
            'content': n['content'],
            'created_at': datetime_format(n['crawled_at']),
        }

        entries.append(item)
        # random.shuffle(entries)

    return render_template('index.html', entries=entries)
Code Example #6
File: app.py  Project: dog1203/BlockChainews_ajax
def get_news():
    page = int(request.args.get('page', 0))
    limit = int(request.args.get('limit', 10))

    news = db.news.find().sort("crawled_at", -1).skip(page * limit).limit(limit)

    # import json
    # from bson import json_util
    # docs_list = list(news)
    # return json.dumps(docs_list, default=json_util.default)

    data = []
    for n in news:
        item = {
            'id': str(n['_id']),
            'title': n['title'],
            'url': n['url'],
            'image': n['img_url'],
            'category': n['category'],
            'source': n['source'],
            'content': n['content'],
            'created_at': datetime_format(n['crawled_at']),
        }

        data.append(item)
    return jsonify(data)
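
As a usage sketch, the page and limit query parameters of the JSON endpoints above could be exercised like this (the endpoint path is an assumption, since the route decorators are not shown):

# Usage sketch: request the second page of 10 items and print a few fields.
import requests

resp = requests.get('http://localhost:5000/api/news', params={'page': 1, 'limit': 10})
for item in resp.json():
    print(item['created_at'], item['source'], item['title'])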