Example #1
    def __init__(self, name, app, wait_for_initial_tokens=False):
        super().__init__(name, app)
        log.debug("initialize new dataflow runtime process (%s)",
                  self.full_name)

        app_trace = app.trace

        self._channels = {}
        self._current_segment = None
        self._remaining_compute_cycles = None

        # a seekable iterator over all segments in the process trace
        self._trace = more_itertools.seekable(app_trace.get_trace(name),
                                              maxlen=16)

        # keep track of the total cycles to process and the sum of cycles
        # already processed
        self._total_cycles = app_trace.accumulate_processor_cycles(name)
        self._total_cycles_processed = {
            p: 0
            for p in self._total_cycles.keys()
        }

        # lets the workload method know whether it is run for the first time
        # or whether it is resumed
        self._is_running = False

        self._wait_for_initial_tokens = wait_for_initial_tokens
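An aside on the maxlen=16 argument used here (and in the tests below): it bounds the seekable cache, so only the most recent segments remain replayable. A minimal sketch of that effect with toy data, assuming current more_itertools behavior:

from more_itertools import seekable

it = seekable(range(100), maxlen=4)
for _ in it:  # drain the iterator; the bounded cache keeps only the last 4 items
    pass
print(list(it.elements()))  # [96, 97, 98, 99]
it.seek(0)       # cannot rewind past the cache; index 0 is the oldest cached item
print(next(it))  # 96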
Example #2
    def __init__(self,
                 iterable: Iterable,
                 is_cached: bool = True,
                 is_error_suppress: bool = True):
        # wrap scalars (and strings) so they behave as a one-item iterable
        iterable = always_iterable(iterable)
        self.is_cached = is_cached
        self.is_error_suppress = is_error_suppress
        # cache emitted values (and allow seeking) only when requested
        self.iterable = seekable(iterable) if self.is_cached else iterable
Example #3
def process(codes):
    it = seekable(enumerate(codes))
    for pos, [opcode_num] in it:
        func, params, offset = parse_opcode_num(codes, opcode_num, pos)
        try:
            result = func(codes, *params) or {}
            it.seek(result.get('seek_to', pos + offset))
        except Halt:
            return
Example #4
    def _run(self):
        it = seekable(enumerate(self.codes))
        for pos, [opcode_num] in it:
            func, params, offset, opcode = self._parse_opcode_num(
                self.codes, opcode_num, pos)
            yield {'opcode': opcode}
            try:
                result = func(self.codes, *params) or {}
                it.seek(result.get('seek_to', pos + offset))
            except self.Halt:
                return
Example #5
    def test_workload_wait_for_initial_token(
        self, env, dataflow_process, processor, empty_channel
    ):
        dataflow_process._wait_for_initial_tokens = True
        dataflow_process._trace = more_itertools.seekable(
            self.initial_read_trace_generator(), maxlen=16
        )
        env.run()
        dataflow_process._channels["chan"] = weakref.ref(empty_channel)
        dataflow_process.start()
        env.run()
        assert dataflow_process._state == ProcessState.BLOCKED
Example #6
def read(instance, limit=8):
    data = []
    try:
        rows = mit.seekable(instance.data.iter(size=limit, sort="row_no"))
        rows.seek(0)
        column_names = instance.data.headers_map.keys()
        for count, row in enumerate(rows, start=1):
            data.append(make_row(column_names, row))
            if count == limit:
                break
    except Exception:
        pass
    return data
Example #7
    def test_workload_read_block(
        self, env, dataflow_process, processor, empty_channel
    ):
        dataflow_process._trace = more_itertools.seekable(
            self.read_trace_generator(), maxlen=16
        )
        env.run()
        dataflow_process._channels["chan"] = weakref.ref(empty_channel)
        dataflow_process.start()
        env.run()
        dataflow_process.activate(processor)
        env.run()
        finished = env.process(dataflow_process.workload())
        env.run(finished)
        assert dataflow_process._state == ProcessState.BLOCKED
        assert env.now == 1
Example #8
    def test_preemption(self, env, dataflow_process, processor, processor2):
        # monkey patch the process to add a trace
        dataflow_process._trace = more_itertools.seekable(
            self.preemption_trace_generator(), maxlen=16
        )
        dataflow_process._total_cycles = {"Test": 10, "Test2": 20}
        dataflow_process._remaining_compute_cycles = {"Test": 0, "Test2": 0}

        env.run()
        dataflow_process.start()
        env.run()
        dataflow_process.activate(processor)
        assert dataflow_process.processor is processor
        env.run()
        env.process(dataflow_process.workload())
        env.run(5)
        dataflow_process.preempt()
        env.run(10)

        assert dataflow_process.processor is None
        assert dataflow_process._remaining_compute_cycles["Test"] == 5
        assert dataflow_process._remaining_compute_cycles["Test2"] == 10
        assert dataflow_process.get_progress() == 0.5

        # continue execution on processor2 for 5 cycles (10 ticks)
        dataflow_process.activate(processor2)
        env.run(15)
        env.process(dataflow_process.workload())
        assert dataflow_process.processor is processor2
        env.run(25)
        dataflow_process.preempt()
        env.run(26)

        assert dataflow_process.processor is None
        assert dataflow_process._remaining_compute_cycles["Test"] == 3
        assert dataflow_process._remaining_compute_cycles["Test2"] == 5
        assert dataflow_process.get_progress() == 0.725

        dataflow_process.activate(processor2)
        env.run(30)
        finished = env.process(dataflow_process.workload())
        env.run(finished)

        assert dataflow_process._remaining_compute_cycles is None

        assert dataflow_process.get_progress() == 1.0
Example #9
    def _run(
        self,
        env,
        dataflow_process,
        processor,
        trace=None,
        channel=None,
    ):
        dataflow_process._trace = more_itertools.seekable(trace, maxlen=16)
        env.run()
        dataflow_process._channels["chan"] = (
            weakref.ref(channel) if channel else None
        )
        dataflow_process.start()
        env.run()
        dataflow_process.activate(processor)
        env.run()
        finished = env.process(dataflow_process.workload())
        env.run(finished)
Example #10
def loc_tz_getter() -> Iterator[DayWithZone]:
    # seekable caches the emitted values so the iterator can be rewound
    return seekable(_iter_tzs())
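The comment above is the reason for the wrapper: because seekable caches what it emits, the returned iterator can be rewound and replayed without re-running the generator. A small sketch with a stand-in generator (_iter_tzs itself is not shown here):

from more_itertools import seekable

def _fake_tzs():  # hypothetical stand-in for _iter_tzs()
    yield from ["UTC", "Europe/Berlin", "Asia/Tokyo"]

it = seekable(_fake_tzs())
first_pass = list(it)   # exhausts the generator; every value is cached
it.seek(0)              # rewind to the start of the cache
second_pass = list(it)  # served from the cache, the generator is not re-run
assert first_pass == second_pass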
Example #11
from more_itertools import seekable

data = "This is example sentence for seeking back and forth".split()

it = seekable(data)
for word in it:
    ...  # consume the whole iterable; every emitted word is cached

next(it)
# raises StopIteration: the iterator is exhausted
it.seek(3)
next(it)
# "sentence": seek() rewound the iterator into the cache
Example #12
    def __init__(self, iterable, filename=None):
        self._iterable = more_itertools.seekable(iterable)
        self._filename = filename

        self._ptr = 0  # pointer to the next item
        self._len = None