Example #1
def _conf_db(app):
    """Attach the MongoDB handle, the GridFS store and the indexes to the app."""
    import gridfs
    from dlstats.utils import get_mongo_db
    from widukind_web.utils import create_or_update_indexes
    app.widukind_db = get_mongo_db(app.config.get("MONGODB_URL"))
    app.widukind_fs = gridfs.GridFS(app.widukind_db)
    create_or_update_indexes(app.widukind_db)
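For context, a minimal sketch of how _conf_db might be wired into the application factory; the create_app function and the hard-coded URL below are assumptions, not part of the source:

# Hypothetical wiring -- create_app and the MONGODB_URL value are assumed
from flask import Flask

def create_app():
    app = Flask(__name__)
    app.config["MONGODB_URL"] = "mongodb://localhost:27017/widukind"  # assumed
    _conf_db(app)  # attaches app.widukind_db, app.widukind_fs and the indexes
    return app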
def main():
    import logging
    import multiprocessing as mp
    import time

    logging.basicConfig(level=logging.DEBUG)
    db = utils.get_mongo_db()  # utils is the project's dlstats.utils module
    
    #mp.set_start_method('spawn')
    m = mp.Manager()
    q = m.JoinableQueue()
        
    pool = mp.Pool(processes=4)
    results = []    
    try:
        results.append(pool.apply_async(producer, [q]))
        results.append(pool.apply_async(consumer, [q]))
        for r in results:
            print(r.get())  # blocks until each worker has returned

        while not q.empty():  # wait for any remaining items to be processed
            time.sleep(0.1)
        
    finally:
        pool.close()
        pool.join()
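Note on the queue choice: the queue comes from mp.Manager().JoinableQueue() rather than mp.JoinableQueue() because a plain multiprocessing queue cannot be passed as an argument to pool workers (multiprocessing raises a RuntimeError telling you to share queues through inheritance); the manager returns a picklable proxy that pool.apply_async can ship to the worker processes.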

    """
Example #3
def main():
    import logging
    import time

    import gevent
    from gevent.queue import Queue  # assumed: Queue comes from gevent.queue

    logging.basicConfig(level=logging.DEBUG)
    db = utils.get_mongo_db()
    greenlets = []

    q = Queue()

    greenlets.append(
        gevent.spawn(producer, q, db, constants.COL_PROVIDERS,
                     common.slug_provider))
    greenlets.append(
        gevent.spawn(producer, q, db, constants.COL_DATASETS,
                     common.slug_dataset))
    greenlets.append(
        gevent.spawn(producer, q, db, constants.COL_SERIES,
                     common.slug_series))
    greenlets.append(gevent.spawn(consumer, q, db))
    start = time.time()
    try:
        gevent.joinall(greenlets)
    except KeyboardInterrupt:
        pass
    end = time.time()
    print("%.3f seconds" % (end - start))
def main():
    import logging
    import time

    logging.basicConfig(level=logging.DEBUG)
    db = utils.get_mongo_db()
    
    utils.create_or_update_indexes(db)
    
    start = time.time()
    try:        
        
        _start = time.time()
        consumer(db, constants.COL_PROVIDERS, slug_provider, {"name": True}, 1000)
        print("%s - %.3f seconds" % (constants.COL_PROVIDERS, (time.time() - _start)))
        
        _start = time.time()
        consumer(db, constants.COL_DATASETS, slug_dataset, {"provider": True, "datasetCode": True}, 1000)
        print("%s - %.3f seconds" % (constants.COL_DATASETS, (time.time() - _start)))
        
        _start = time.time()
        consumer(db, constants.COL_SERIES, slug_series, {"provider": True, "datasetCode": True, "key": True}, 1000)
        print("%s - %.3f seconds" % (constants.COL_SERIES, (time.time() - _start)))
        
        #TODO: do we need to reindex each modified series at the end?
        """
        Migration:
        - 1. Run the slug update
        - 2. utils.create_or_update_indexes(db)
        """
        
    except KeyboardInterrupt:
        pass
    end = time.time()
    print("%.3f seconds" % (end - start))
def consumer(queue):
    db = utils.get_mongo_db()
    while True:
        try:
            query = queue.get()
            if query is None:  # sentinel: the producer is done
                break
            print("CONS : ", query)
            queue.task_done()
        except TimeoutError:
            print("TimeoutError")
    queue.task_done()  # account for the sentinel item itself
def producer(queue):
    db = utils.get_mongo_db()
    for doc in db[constants.COL_PROVIDERS].find({"slug": {"$exists": False}}):
        query = {
            "col": constants.COL_PROVIDERS,
            "id": doc['_id'],
            "slug": common.slug_provider(doc)
        }
        print("PRO : ", query)
        queue.put(query)

    queue.put(None)  # sentinel so the consumer's loop can terminate
    return "TEST"
def main():
    import os
    import logging
    import asyncio

    global queue

    #set PYTHONASYNCIODEBUG=1

    logging.basicConfig(level=logging.DEBUG)
    db = utils.get_mongo_db()
    
    if os.name == 'nt':
        # Windows needs the proactor loop (it is the default since Python 3.8)
        io_loop = asyncio.ProactorEventLoop()
        asyncio.set_event_loop(io_loop)
    else:
        io_loop = asyncio.get_event_loop()

    # With maxsize=2, items end up being processed about 3 at a time
    # (the `loop` argument of asyncio.Queue was removed in Python 3.10)
    queue = asyncio.Queue(maxsize=2)

    ensure_future = asyncio.ensure_future  # asyncio.async is a syntax error on Python 3.7+
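The coroutines this queue would feed are missing from the truncated snippet. A minimal sketch of an asyncio producer/consumer pair for the same slug workload; all names below are assumptions, and the blocking pymongo calls inside coroutines stall the loop, so the real code might use an async driver such as motor:

async def producer(queue, db):
    for doc in db[constants.COL_PROVIDERS].find({"slug": {"$exists": False}}):
        # with maxsize=2, put() waits here until the consumer catches up
        await queue.put({"id": doc["_id"], "slug": common.slug_provider(doc)})
    await queue.put(None)  # sentinel

async def consumer(queue, db):
    while True:
        item = await queue.get()
        if item is None:
            break
        db[constants.COL_PROVIDERS].update_one({"_id": item["id"]},
                                               {"$set": {"slug": item["slug"]}})
        queue.task_done()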
Example #13
def __init__(
    self,
    label="",
    colname=None,
    id_attr="_id",
    label_attr="",
    query=None,  # avoid a mutable {} default argument
    validators=None,
    allow_blank=False,
    blank_text="---",
    **kwargs
):
    super().__init__(label, validators, **kwargs)
    self.id_attr = id_attr
    self.label_attr = label_attr
    self.allow_blank = allow_blank
    self.blank_text = blank_text
    self.colname = colname
    self.query = query or {}
    self.db = get_mongo_db()
    self.col = self.db[self.colname]
    self.queryset = self.db[self.colname].find(self.query)
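This __init__ reads like a WTForms select field backed by a Mongo collection. A hypothetical usage sketch; the class name QuerySetSelectField, the form, and the collection/attribute names are all assumptions:

from flask_wtf import FlaskForm

class DatasetForm(FlaskForm):
    provider = QuerySetSelectField(label="Provider",
                                   colname="providers",  # Mongo collection to query
                                   label_attr="name",    # document field shown as the option label
                                   allow_blank=True, blank_text="---")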