コード例 #1
0
ファイル: documents.py プロジェクト: nivertech/aleph
def update_document(document):
    """Run the standard post-write hooks for a document.

    Invoked after every write to a document or its metadata: queues the
    asynchronous analysis task, refreshes the search index, and mirrors
    the document into the graph store.
    """
    doc_id = document.id
    # Kick off background analysis via the task queue.
    analyze_document_id.delay(doc_id)
    # Re-index the document itself; per-record indexing is skipped here.
    index_document(document, index_records=False)
    # Reload the document node inside a single graph transaction.
    with graph.transaction() as txn:
        graph.load_document(txn, document)
コード例 #2
0
ファイル: documents.py プロジェクト: CodeForAfrica/aleph
def update_document(document):
    """Run the standard post-write hooks for a document.

    Invoked after every write to a document or its metadata: queues the
    asynchronous analysis task on the user queue, refreshes the search
    index, and mirrors the document into the graph store.
    """
    # Queue background analysis, routed explicitly to the user queue.
    analyze_document_id.apply_async(
        [document.id],
        queue=USER_QUEUE,
        routing_key=USER_ROUTING_KEY,
    )
    # Re-index the document itself; per-record indexing is skipped here.
    index_document(document, index_records=False)
    # Reload the document node inside a single graph transaction.
    with graph.transaction() as txn:
        graph.load_document(txn, document)
コード例 #3
0
def update_document(document):
    """Run the standard post-write hooks for a document.

    Invoked after every write to a document or its metadata: queues the
    asynchronous analysis task on the user queue, refreshes the search
    index, and mirrors the document into the graph store.
    """
    task_args = [document.id]
    # Queue background analysis, routed explicitly to the user queue.
    analyze_document_id.apply_async(task_args,
                                    queue=USER_QUEUE,
                                    routing_key=USER_ROUTING_KEY)
    # Re-index the document itself; per-record indexing is skipped here.
    index_document(document, index_records=False)
    # Reload the document node inside a single graph transaction.
    with graph.transaction() as txn:
        graph.load_document(txn, document)
コード例 #4
0
ファイル: __init__.py プロジェクト: correctiv/aleph
def analyze_document(document):
    """Run every registered analyzer over a document, then persist it.

    Each analyzer is constructed against the document's metadata,
    prepared, fed every text part, and finalized. The (possibly
    analyzer-mutated) metadata is written back, the document committed
    to the database, re-indexed, and reloaded into the graph store.
    """
    log.info("Analyze document: %r", document)
    meta = document.meta
    # Instantiate and prepare each analyzer in registration order.
    analyzers = []
    for analyzer_cls in get_analyzers():
        instance = analyzer_cls(document, meta)
        instance.prepare()
        analyzers.append(instance)

    # Stream every text part through every analyzer.
    for text in document.text_parts():
        for instance in analyzers:
            instance.on_text(text)

    # Let analyzers flush their accumulated results.
    for instance in analyzers:
        instance.finalize()

    # Write the metadata back; assignment may trigger serialization
    # on the model — TODO confirm against the Document model.
    document.meta = meta
    db.session.add(document)
    db.session.commit()
    index_document(document)
    with graph.transaction() as txn:
        graph.load_document(txn, document)
コード例 #5
0
ファイル: __init__.py プロジェクト: nivertech/aleph
def analyze_document(document):
    """Apply all configured analyzers to a document and save the result.

    Analyzers get three phases: prepare(), on_text() for each text part,
    and finalize(). Afterwards the metadata is re-assigned onto the
    document, the session committed, the search index refreshed, and the
    graph representation reloaded.
    """
    log.info("Analyze document: %r", document)
    meta = document.meta

    # Phase 1: construct and prepare each analyzer.
    active = []
    for cls in get_analyzers():
        az = cls(document, meta)
        az.prepare()
        active.append(az)

    # Phase 2: feed every text fragment to every analyzer.
    for fragment in document.text_parts():
        for az in active:
            az.on_text(fragment)

    # Phase 3: finalize so analyzers can emit their findings.
    for az in active:
        az.finalize()

    # Persist the (possibly mutated) metadata and the document itself.
    document.meta = meta
    db.session.add(document)
    db.session.commit()
    index_document(document)
    # Mirror the updated document into the graph store.
    with graph.transaction() as tx:
        graph.load_document(tx, document)