Code example #1
import logging
import queue

# gen_channel_name, gen_var_name, writer_gen, comm and rank are provided by the
# surrounding project (delta); they are not defined in this snippet.


def forward(Q, cfg, args, timeout):
    """To be executed by a local thread. Pops items from the queue and forwards them."""
    global comm, rank
    logger = logging.getLogger("middleman")
    logger.info(
        f"Worker: Creating writer_gen: engine={cfg[args.transport_tx]['engine']}"
    )

    ch_name = gen_channel_name(cfg["diagnostic"])
    writer = writer_gen(cfg[args.transport_tx], ch_name)
    logger.info(f"Worker: Streaming channel name = {ch_name}")

    tx_list = []
    is_first = True
    while True:
        try:
            msg = Q.get(timeout=timeout)
            logger.info(
                f"Worker: Receiving from Queue: {msg} - {msg.data.shape}, {msg.data.dtype}"
            )
            if is_first:
                writer.DefineVariable(
                    gen_var_name(cfg)[rank], msg.data.shape, msg.data.dtype)
                writer.DefineAttributes("stream_attrs", msg.attrs)
                logger.info(
                    f"Worker: Defining stream_attrs for forwarded stream: {msg.attrs}"
                )
                writer.Open()
                logger.info("Worker: Starting forwarding process")
                is_first = False
        except queue.Empty:
            logger.info(
                "Worker: Empty queue after waiting until time-out. Exiting")
            break

        logger.info(
            f"Worker: Forwarding chunk {msg.tstep_idx}. Data = {msg.data.shape}"
        )
        writer.BeginStep()
        writer.put_data(msg)
        writer.EndStep()
        logger.info(f"Worker: Done writing chunk {msg.tstep_idx}.")
        tx_list.append(msg.tstep_idx)

        Q.task_done()
        logger.info(f"Consumed tidx={msg.tstep_idx}")

    writer.Close()
    logger.info(
        f"Worker: Exiting send loop. Transmitted {len(tx_list)} time chunks: {tx_list}"
    )
    logger.info(writer.transfer_stats())
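
The function above is meant to run in a local thread. As a usage sketch (the
message source and the timeout value below are illustrative assumptions, not
code from the project), it could be driven like this:

import queue
import threading

q = queue.Queue()
worker = threading.Thread(target=forward, args=(q, cfg, args, 60.0))
worker.start()

for msg in incoming_messages():  # hypothetical message source
    q.put(msg)

q.join()        # block until every queued chunk was marked done via task_done()
worker.join()   # forward() returns once the queue stays empty past the timeout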
Code example #2
File: generator_brute.py  Project: lastephey/delta
# Attach measurement metadata from the data object to the configuration
cfg.update({
    'TriggerTime': dobj.tt.tolist(),
    'SampleRate': [dobj.fs / 1e3],
    'TFcurrent': dobj.itf / 1e3,
    'Mode': dobj.mode,
    'LoFreq': dobj.lo,
    'LensFocus': dobj.sf,
    'LensZoom': dobj.sz
})

# Load all h5 data into memory
print("Loading h5 data into memory")
timebase = dobj.time_base_full()
tstarts = timebase[::batch_size]
tstops = timebase[batch_size - 1::batch_size]
_, data = dobj.get_data(trange=[timebase[0], timebase[-1]], norm=1, verbose=0)
data_all = np.array_split(data, num_batches, axis=-1)

writer = writer_gen(cfg["transport_nersc"])

data_arr = data_all[0]
writer.DefineVariable("tstep", np.array(0))
writer.DefineVariable("floats", data_arr)
writer.DefineVariable("trange", np.array([0.0, 0.0]))
writer.DefineAttributes("cfg", cfg)
writer.Open()

print("Start sending:")
t0 = time.time()
for i in range(nstep):
    if rank == 0:
        print("Sending: {0:d} / {1:d}".format(i, nstep))
    with writer.step() as w:
        w.put_data("tstep", np.array(i))
Code example #3
    "params": {
        "IPAddress": "128.55.205.18",
        "Timeout": "120",
        "Port": "50001",
        "TransportMode": "reliable"
    }
}

channel_name = gen_channel_name(2408, rank)

if rank == 0:
    logging.info(
        "==================I am test_reader_multichannel===================")
logging.info(f"Channel_name = {channel_name}")

w = writer_gen(cfg_transport, channel_name)
w.DefineVariable("dummy", (192, 10_000), np.float64)
w.DefineVariable("tstep", (1, 1), np.int32)
w.Open()
w.DefineAttributes("strem_attrs", {"test": "yes"})

for tstep in range(1, 100):
    if rank == tstep % size:
        data = np.random.normal(1000.0 * (rank + 1) + tstep,
                                0.1,
                                size=(192, 10_000))
        chunk = twod_chunk(data)
        tdata = twod_chunk(
            np.array([[1]], dtype=np.int32) *
            np.int32(tstep))  # Clunky way to send time-step data :)
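        # The listing is truncated here. One plausible continuation, assuming
        # the BeginStep/put_data/EndStep pattern and the single-argument
        # put_data(chunk) call shown in code example #1 (a sketch only; the
        # put_data signature differs between the examples on this page):
        w.BeginStep()
        w.put_data(chunk)
        w.put_data(tdata)
        w.EndStep()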
Code example #4
with open(args.config, "r") as df:
    cfg = json.load(df)

# Set up the logger
with open('configs/logger.yaml', 'r') as f:
    log_cfg = yaml.safe_load(f.read())
logging.config.dictConfig(log_cfg)
logger = logging.getLogger("generator")
logger.info("Starting up...")

# Instantiate a dataloader
dataloader = get_loader(cfg)
configname = "transport_tx" if not args.kstar else "transport_rx"
logger.info(f"Creating writer_gen: engine={cfg[configname]['engine']}")

writer = writer_gen(cfg[configname], gen_channel_name(cfg["diagnostic"]))
logger.info(f"Streaming channel name = {gen_channel_name(cfg['diagnostic'])}")
# Give the writer hints on what kind of data to transfer
writer.DefineVariable(
    gen_var_name(cfg)[rank], dataloader.get_chunk_shape(), dataloader.dtype)
# TODO: Clean up naming conventions for stream attributes
logger.info(f"Writing attributes: {dataloader.attrs}")

writer.Open()
writer.DefineAttributes("stream_attrs", dataloader.attrs)

logger.info("Start sending on channel:")
batch_gen = dataloader.batch_generator()
for nstep, chunk in enumerate(batch_gen):
    # TODO: Do we want to place filtering in the generator? This would allow us to
Code example #5
with open(args.config, "r") as df:
    cfg = json.load(df)

# Set up the logger
with open('configs/logger.yaml', 'r') as f:
    log_cfg = yaml.safe_load(f.read())
logging.config.dictConfig(log_cfg)
logger = logging.getLogger("generator")
logger.info("Starting up...")

# Instantiate a dataloader
dataloader = get_loader(cfg)
sectionname = "transport_tx" if not args.kstar else "transport_rx"
logger.info(f"Creating writer_gen: engine={cfg[sectionname]['engine']}")

writer = writer_gen(cfg[sectionname], gen_channel_name(cfg["diagnostic"]))
logger.info(f"Streaming channel name = {gen_channel_name(cfg['diagnostic'])}")
# Give the writer hints on what kind of data to transfer
writer.DefineVariable(gen_var_name(cfg)[rank],
                      dataloader.get_chunk_shape(),
                      dataloader.dtype)
# TODO: Clean up naming conventions for stream attributes
logger.info(f"Writing attributes: {dataloader.attrs}")

writer.Open()
writer.DefineAttributes("stream_attrs", dataloader.attrs)

logger.info("Start sending on channel:")
batch_gen = dataloader.batch_generator()
for nstep, chunk in enumerate(batch_gen):
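    # This example and #4 are both truncated inside the send loop. A minimal
    # sketch of the body, assuming the BeginStep/put_data/EndStep pattern from
    # code example #1 (not verbatim project code):
    logger.info(f"Sending chunk {nstep}")
    writer.BeginStep()
    writer.put_data(chunk)
    writer.EndStep()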
Code example #6
# Get a data_loader
dobj = KstarEcei(shot=shotnr, data_path=datapath, clist=my_channel_range, verbose=False)
cfg.update({'TriggerTime': dobj.tt.tolist(), 'SampleRate': [dobj.fs / 1e3],
            'TFcurrent': dobj.itf / 1e3, 'Mode': dobj.mode,
            'LoFreq': dobj.lo, 'LensFocus': dobj.sf, 'LensZoom': dobj.sz})

# Load all h5 data into memory
print("Loading h5 data into memory")
timebase = dobj.time_base_full()
tstarts = timebase[::batch_size]
tstops = timebase[batch_size - 1::batch_size]
_, data = dobj.get_data(trange=[timebase[0], timebase[-1]], norm=1, verbose=0)
data_all = np.array_split(data, num_batches, axis=-1)

writer = writer_gen(shotnr, gen_id, cfg["engine"], cfg["params"])

data_arr = data_all[0]
varData = writer.DefineVariable("floats", data_arr)
varTime = writer.DefineVariable("trange", np.array([0.0, 0.0]))
writer.DefineAttributes("cfg", cfg)
writer.Open()

print("Start sending:")
t0 = time.time()
for i in range(nstep):
    if rank == 0:
        print("Sending: {0:d} / {1:d}".format(i, nstep))
    with writer.step() as w:
        w.put_data(varTime, np.array([tstarts[i], tstops[i]]))
        w.put_data(varData, data_all[i])
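
The listing stops inside the send loop; since t0 = time.time() is taken right
before the loop, a natural (assumed) ending reports the elapsed transfer time:

t1 = time.time()
if rank == 0:
    print("Sent {0:d} steps in {1:6.2f} seconds".format(nstep, t1 - t0))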