Example #1
def process_search(search_request_id):

    logger.info('searching %s' % search_request_id)

    # purge any recent work
    key = 'searchrequest/%s' % search_request_id

    RW = get_worker_cache()
    RW.delete(key)

    db = get_database()

    try:
        find_search_items_sql = """
         select s.id, 
                COALESCE( reg.register_code, 'All'),
                s.namelist_id, 
                si.id as search_item_id,
                si.search_name, 
                si.details 
              from core.search_request s
              left outer join core.register reg on ( reg.id = s.register_id )
              join core.searchitem si on ( si.namelist_id = s.namelist_id )
        where s.id = %s 
        """

        logger.info('searching database')
        cursor = db.cursor('searchCursor')
        cursor.itersize = 1000

        cursor.execute(find_search_items_sql, [search_request_id])
        logger.info('start document ')

        count = 0
        subtasks = []
        for (search_request_id, source, namelist_id, search_item_id,
             search_name, details) in cursor:
            subtasks.append(
                search_name_task.subtask(
                    (search_request_id, source, namelist_id, search_item_id,
                     search_name, details)).set(queue='searchNameQueue'))
    except Exception as e:
        update_status(search_request_id, 'F',
                      dict(message='failed', current=0, total=0))
        return False
Example #2
def process_search(search_request_id):

    logger.info( 'searching %s' % search_request_id )

    # purge any recent work
    key = 'searchrequest/%s' % search_request_id

    RW = get_worker_cache()
    RW.delete(key)

    db = get_database()

    try:
        find_search_items_sql = """
         select s.id, 
                COALESCE( reg.register_code, 'All'),
                s.namelist_id, 
                si.id as search_item_id,
                si.search_name, 
                si.details 
              from core.search_request s
              left outer join core.register reg on ( reg.id = s.register_id )
              join core.searchitem si on ( si.namelist_id = s.namelist_id )
        where s.id = %s 
        """

        logger.info('searching database')
        cursor = db.cursor('searchCursor')
        cursor.itersize = 1000

        cursor.execute(find_search_items_sql, [search_request_id] )
        logger.info('start document ')

        count = 0
        subtasks = []
        for (search_request_id, source, namelist_id, search_item_id,
             search_name, details) in cursor:
            subtasks.append(search_name_task.subtask(
                (search_request_id, source, namelist_id, search_item_id,
                 search_name, details)).set(queue='searchNameQueue'))
    except Exception as e:
        update_status(search_request_id, 'F', dict(message='failed', current=0, total=0))
        return False
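
Both snippets above follow the same Celery fan-out pattern: build one task signature per database row, dispatch the signatures as a group onto a dedicated queue, then poll the group result to report progress. Below is a minimal, self-contained sketch of that pattern only; the `app` instance, the broker/backend URLs, and the trivial `search_name_task` body are assumptions for illustration, not part of the original code.

import time

from celery import Celery, group

# Placeholder app, broker and backend, purely for this sketch.
app = Celery('search', broker='redis://localhost:6379/0',
             backend='redis://localhost:6379/0')


@app.task
def search_name_task(search_request_id, source, namelist_id,
                     search_item_id, search_name, details):
    # Stand-in body; the real task would run the per-name search.
    return search_item_id


def fan_out(rows):
    # One signature per row, all routed to the same worker queue.
    subtasks = [search_name_task.subtask(row).set(queue='searchNameQueue')
                for row in rows]
    results = group(subtasks).apply_async()
    # Poll the group result and report progress until every subtask is done.
    while results.waiting():
        print('progress: %s/%s' % (results.completed_count(), len(subtasks)))
        time.sleep(5)
    return results.successful()

Polling with completed_count() keeps progress reporting on the dispatching side; a chord (a group with a completion callback) is the usual alternative when the dispatcher should not block.
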
Example #3
    except Exception as e:
        update_status(search_request_id, 'F',
                      dict(message='failed', current=0, total=0))
        return False

    finally:
        db.close()

    logger.info("created %s subtasks" % len(subtasks))
    job = group(subtasks)
    results = job.apply_async()
    while results.waiting():

        update_status(
            search_request_id, 'S',
            dict(message='searching',
                 current=results.completed_count(),
                 total=len(subtasks)))

        logger.info("progress: %s/%s" %
                    (results.completed_count(), len(subtasks)))
        time.sleep(5)

    logger.info("progress: %s/%s" % (results.completed_count(), len(subtasks)))
    logger.info("subtasks finished")

    if results.successful():
        update_status(
            search_request_id, 'S',
            dict(message='exporting',
                 current=results.completed_count(),
Example #4
            subtasks.append(search_name_task.subtask(
                (search_request_id, source, namelist_id, search_item_id,
                 search_name, details)).set(queue='searchNameQueue'))
    except Exception as e:
        update_status(search_request_id, 'F', dict(message='failed', current=0, total=0))
        return False

    finally:
        db.close()


    logger.info("created %s subtasks" % len( subtasks ))
    job = group( subtasks )
    results = job.apply_async() 
    while results.waiting():

        update_status(search_request_id, 'S', dict(message='searching',
                        current=results.completed_count(),
                        total=len(subtasks) ))

        logger.info( "progress: %s/%s" % ( results.completed_count(), len(subtasks) ))
        time.sleep(5)

    logger.info( "progress: %s/%s" % ( results.completed_count(), len(subtasks) ))        
    logger.info("subtasks finished")
    
    if results.successful():
        update_status(search_request_id, 'S', dict(message='exporting',
                        current=results.completed_count(),
                        total=len(subtasks) ))
        export.apply_async((search_request_id,), queue='exportQueue')   
    else:
        update_status(search_request_id, 'F', dict(message='failed',