Example #1
def init_worker(counter: Synchronized) -> None:
    """
    This function runs only in parallel mode. It initializes the
    individual processes, which are also called workers.
    """
    global _worker_id

    with counter.get_lock():
        counter.value += 1
        _worker_id = counter.value
    """
    You can now use _worker_id.
    """

    # Clear the cache
    from zerver.lib.cache import get_cache_backend
    cache = get_cache_backend(None)
    cache.clear()

    # Close all connections
    connections.close_all()

    destroy_test_databases(_worker_id)
    create_test_databases(_worker_id)
    initialize_worker_path(_worker_id)

    # We manually update the upload directory path in the url regex.
    from zproject.dev_urls import avatars_url
    new_root = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars")
    avatars_url.default_args['document_root'] = new_root
Example #2
def init_worker(counter: Synchronized) -> None:
    """
    This function runs only in parallel mode. It initializes the
    individual processes, which are also called workers.
    """
    global _worker_id

    with counter.get_lock():
        counter.value += 1
        _worker_id = counter.value

    """
    You can now use _worker_id.
    """

    test_classes.API_KEYS = {}

    # Clear the cache
    from zerver.lib.cache import get_cache_backend
    cache = get_cache_backend(None)
    cache.clear()

    # Close all connections
    connections.close_all()

    destroy_test_databases(_worker_id)
    create_test_databases(_worker_id)

    # Every process should upload to a separate directory so that
    # race conditions can be avoided.
    settings.LOCAL_UPLOADS_DIR = '{}_{}'.format(settings.LOCAL_UPLOADS_DIR,
                                                _worker_id)

    def is_upload_avatar_url(url: RegexURLPattern) -> bool:
        return url.regex.pattern == r'^user_avatars/(?P<path>.*)$'

    # We manually update the upload directory path in the url regex.
    from zproject import dev_urls
    found = False
    for url in dev_urls.urls:
        if is_upload_avatar_url(url):
            found = True
            new_root = os.path.join(settings.LOCAL_UPLOADS_DIR, "avatars")
            url.default_args['document_root'] = new_root

    if not found:
        print("*** Upload directory not found.")
Example #3
def rate_server(host: str, port: int, client_count: Synchronized) -> None:
    """rate server"""

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as socket_server:

        socket_server.bind((host, port))
        socket_server.listen()

        while True:

            conn, _ = socket_server.accept()

            client_con_thread = ClientConnectionThread(conn, client_count)
            client_con_thread.start()

            with client_count.get_lock():
                client_count.value += 1
Example #4
def rate_server(host: str, port: int, client_count: Synchronized) -> None:
    """rate server"""

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as socket_server:

        socket_server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        socket_server.bind((host, port))
        socket_server.listen(100)

        while True:

            conn, addr = socket_server.accept()

            client_con_thread = ClientConnectionThread(conn, addr,
                                                       client_count)
            client_con_thread.start()

            with client_count.get_lock():
                client_count.value += 1
                log_client_event(client_con_thread.ident, addr[0], addr[1],
                                 "connect")
Example #5
def server_app(shared_data: Synchronized) -> None:
    while True:
        time.sleep(5)
        with shared_data.get_lock():
            shared_data.value += 10
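server_app only mutates the shared value; some other process is expected to read it. A minimal driver sketch, assuming the value is an integer created with multiprocessing.Value and the reader simply polls it:

import multiprocessing
import time

if __name__ == "__main__":
    shared_data = multiprocessing.Value("i", 0)  # lock-protected shared int

    producer = multiprocessing.Process(target=server_app, args=(shared_data,),
                                       daemon=True)
    producer.start()

    # Reading .value acquires the internal lock; get_lock() is only required
    # for read-modify-write sequences like the increment in server_app.
    for _ in range(3):
        time.sleep(5)
        print("current value:", shared_data.value)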
Example #6
def pipeline_in_one_thread(
    tasks_queue: mp.Queue,
    tasks_queue_index: int,
    success_shared: mp_shared.Synchronized,
    device_name: str,
    db_writer_queue: mp.Queue,
    task_name: str,
):
    with tf.device(device_name):
        # =======================================================
        # add initialization part of the pipeline here
        pre_processor = preprocessing.TextPreprocessor('Just for init')
        if os.path.exists(os.path.join('rsc/models')):
            model_dir = os.path.join('rsc/models')
            MER_model = MER.MatRecognition(
                model_path=os.path.join(model_dir, 'matRecognition'),
                bert_path=os.path.join(model_dir, 'MATBert_config'),
                mat_identify_model_path=os.path.join(model_dir,
                                                     'matIdentification'),
                mat_identify_bert_path=os.path.join(model_dir, 'Bert_config'),
            )
        else:
            MER_model = MER.MatRecognition()
        # =======================================================

        # success = True
        success_tasks = []
        error_tasks = []

        with timebudget('{} queue {} by {} @ {}'.format(
                task_name, tasks_queue_index, device_name,
                socket.gethostname())):
            while True:
                records = tasks_queue.get()
                if records is None:
                    break

                if not success_shared.value:
                    continue

                para_batch = []
                meta_ids = []

                for para in records:
                    doc = pre_processor._process(para['text'])
                    para_batch.append(doc.user_data['text'])
                    meta_ids.append(para['_id']['$oid'])

                try:
                    results_batch = MER_model.mat_recognize(para_batch)
                    db_writer_queue.put((meta_ids, results_batch))
                    # if len(success_tasks) > 0:
                    #     raise mp.ProcessError(f'Queue {tasks_queue_index}: Error for debug!')
                except Exception as e:
                    results_batch = []
                    with success_shared.get_lock():
                        success_shared.value = 0
                    print(f'{task_name} queue {tasks_queue_index}: {e}')
                    raise e

                if success_shared.value:
                    success_tasks.extend(meta_ids)
                else:
                    error_tasks.extend(meta_ids)
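pipeline_in_one_thread is driven entirely by objects created in the parent process: one task queue per worker, a shared db_writer_queue for results, and a Synchronized integer used as a "still healthy" flag that any worker can clear to make the others skip their remaining batches. A minimal setup sketch (the process count, device names, task name, and the per-queue None sentinel are assumptions based on how the function reads its arguments):

import multiprocessing as mp

if __name__ == "__main__":
    db_writer_queue = mp.Queue()
    success_shared = mp.Value("i", 1)      # 1 = healthy, a worker sets 0 on error
    task_queues = [mp.Queue() for _ in range(2)]

    workers = [
        mp.Process(
            target=pipeline_in_one_thread,
            args=(queue, index, success_shared, f"/gpu:{index}",
                  db_writer_queue, "MER"),
        )
        for index, queue in enumerate(task_queues)
    ]
    for worker in workers:
        worker.start()

    # ... enqueue batches of record dicts onto task_queues here ...

    for queue in task_queues:
        queue.put(None)    # sentinel that ends the worker's while-loop
    for worker in workers:
        worker.join()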