## Example #1
progressfile = 'progress.dat'

## Warm-up pass: push one dummy chunk per worker through the pool so
## every worker process/thread imports its heavy modules before real
## data starts streaming in.
print(">>> analysis rank %d: Warming up ... " % rank)
for i in range(cfg["num_workers"]):
    channel_data = np.zeros((num_channels, 100), dtype=np.float64)
    executor.submit(perform_analysis, channel_data, -1)
time.sleep(10)
print(">>> analysis rank %d: Warming up ... done" % rank)

## General reader: the engine type and its parameters are taken from
## the config file; each rank reads the channel matching its rank.
reader = reader_gen(shotnr, gen_id,
                    cfg["analysis_engine"],
                    cfg["analysis_engine_params"],
                    channel=rank)
reader.Open()

## Main loop below: fetch data as soon as it arrives and dispatch each
## step to the workers in the thread (or process) pool.
step = 0
while (True):
    #for i in range(10):
    stepStatus = reader.BeginStep()
    #print(stepStatus)
    if stepStatus == adios2.StepStatus.OK:
        #var = dataman_IO.InquireVariable("floats")
## Example #2
            queue_list[worker_id].task_done()


queue_list = []
worker_list = []

## One queue plus one writer thread per analysis stream; each thread
## runs save_data(n) for its own index n (presumably consuming
## queue_list[n] -- confirm against save_data's definition).
for n in range(num_analysis):
    dq = queue.Queue()
    queue_list.append(dq)
    worker = threading.Thread(target=save_data, args=(n,))
    worker.start()
    worker_list.append(worker)

## General reader: engine type and params come from the config file.
reader = reader_gen(shotnr, gen_id, cfg["engine"], cfg["params"])
reader.Open()

## Main loop below: fetch data as fast as possible; saving with Adios
## is handled asynchronously by the writer threads started above.
step = 0
while (True):
    #for i in range(10):
    stepStatus = reader.BeginStep()
    #print(stepStatus)
    if stepStatus == adios2.StepStatus.OK:
        #var = dataman_IO.InquireVariable("floats")
        #shape = var.Shape()
        #io_array = np.zeros(np.prod(shape), dtype=np.float)
## Example #3
        if executor is not None:
            # Only master will execute the following block
            # Use of "__main__" is critical

            # The master thread will keep reading data, while
            # a helper thread (dispatcher) will dispatch jobs in the queue (dq) asynchronously
            # and distribute jobs to other workers.
            # The main idea is not to slow down the master.
            dq = queue.Queue()
            dispatcher = threading.Thread(target=dispatch)
            dispatcher.start()

            # Only the master thread will open a data stream.
            # General reader: engine type and params can be changed with the config file
            if not args.debug:
                reader = reader_gen(cfg["shotnr"], 0, cfg["engine"],
                                    cfg["params"])
                reader.Open()
            else:
                shot = 18431
                nchunk = 10000
                #merge into cfg dict
                reader = read_stream(shot=shot,
                                     nchunk=nchunk,
                                     data_path=cfg["datapath"])
                reader.cfg_extra = {
                    'shot': shot,
                    'nfft': 1000,
                    'window': 'hann',
                    'overlap': 0.0,
                    'detrend': 1,
                    'channel_range': ["ECEI_L0101-2408"],