class FieldProtector:
    """Temporarily upgrades the brick border around the base to concrete.

    State machine: ``activate()`` -> PROTECTED (concrete walls) -> when
    ``_protected_timer`` expires -> BLINKING (walls alternate between brick
    and concrete each animator step) -> when ``_blink_timer`` expires ->
    NOT_PROTECTED (walls restored to brick).
    """

    PROTECTED = 'protected'
    NOT_PROTECTED = 'not_protected'
    BLINKING = 'blinking'

    def __init__(self, field: Field):
        self.field = field
        # Two-state animator drives the brick/concrete blink.
        self._blink_animator = Animator(delay=1, max_states=2)
        self._protected_timer = Timer(delay=15)
        self._blink_timer = Timer(delay=6)
        self._state = self.NOT_PROTECTED

    def update(self):
        """Advance the protection state machine; call once per frame."""
        if self._state == self.PROTECTED:
            if self._protected_timer.tick():
                self._state = self.BLINKING
                self._blink_timer.start()
        elif self._state == self.BLINKING:
            if self._blink_timer.tick():
                # Blinking finished: restore the plain brick border.
                self._change_base_border_type(CellType.BRICK)
                self._state = self.NOT_PROTECTED
            else:
                state = self._blink_animator()
                self._change_base_border_type(
                    CellType.BRICK if state else CellType.CONCRETE)

    @property
    def cells_around_base(self):
        # (col, row) map coordinates of the 8 wall cells surrounding the base.
        return [(11, 25), (11, 24), (11, 23), (12, 23), (13, 23), (14, 23),
                (14, 24), (14, 25)]

    def _change_base_border_type(self, ct: CellType):
        # FIX: renamed from `_change_base_border_tye` (typo); private, all
        # call sites are inside this class.
        # Repaint every border cell around the base with the given cell type.
        for x, y in self.cells_around_base:
            self.field.map.set_cell_col_row(x, y, ct)

    def activate(self):
        """Start the shield (translated from the original Russian notes):

        1. shield the base with concrete
        2. start the protection timer
        3. when it expires, start the blink animator and blink timer
        4. while blinking, toggle the shield between concrete and brick
        """
        self._state = self.PROTECTED
        self._blink_timer.stop()
        self._protected_timer.start()
        self._change_base_border_type(CellType.CONCRETE)
class ProbeHistory:
    """Accumulates probe hits, chaining hits that share a tid via ``.prev``."""

    def __init__(self):
        self.hits = []             # every hit, in arrival order
        self.hits_lookup = dict()  # tid -> most recent hit with that tid
        self.counters = dict()     # aggregate counters, updated by each hit
        self.timer = Timer()
        self.lost = 0              # running count of lost probes

    def append(self, hit):
        """Record *hit*, linking it to the previous hit with the same tid."""
        key = hit.tid
        if key in self.hits_lookup:
            hit.prev = self.hits_lookup[key]
            # Only repeat hits feed the timer (a delta needs a predecessor).
            self.timer.tick(hit)
        else:
            hit.prev = None
        self.hits.append(hit)
        self.hits_lookup[key] = hit
        hit.update_counters(self.counters)

    def last_hit(self, key):
        """Return the most recent hit recorded for *key*, or None."""
        return self.hits_lookup.get(key)

    def add_lost(self, lost):
        """Add *lost* to the running lost-probe total."""
        self.lost += lost

    def all_hits(self, key):
        """Return every hit for *key*, newest first, by walking .prev links."""
        all_hits = []
        # FIX: was `self.last(key)` — no such method; correct name is
        # `last_hit`, so this always raised AttributeError.
        last = self.last_hit(key)
        while last is not None:
            all_hits.append(last)
            last = last.prev
        return all_hits

    def __str__(self):
        out = str(self.timer)
        for key in self.counters:
            counter = self.counters[key]
            out += "{}: {}".format(key, str(counter))
        out += "lost: {}".format(self.lost)
        return out
def load(self):
    """Build the translator for this model, recording the load time.

    Clears ``loading_lock`` so that concurrent ``run`` calls wait, then
    sets it again once the model is ready. On success, ``self.load_time``
    holds the elapsed load duration.

    Raises:
        ServerModelError: if the underlying translator fails to build.
    """
    self.loading_lock.clear()

    timer = Timer()
    self.logger.info("Loading model %s" % self.model_id)
    timer.start()

    # TODO: model paths should be read from the JSON config, not fixed
    # (the old DefaultOpt(...) experiment was removed as dead code).
    try:
        opt = self.opt
        self.translator = build_translator(
            opt, report_score=False,
            out_file=codecs.open(os.devnull, "w", "utf-8"))
    except RuntimeError as e:
        # FIX: chain the original exception (`from e`) so the traceback
        # shows the real cause server-side.
        raise ServerModelError("Runtime Error: %s" % str(e)) from e

    timer.tick("model_loading")
    self.load_time = timer.tick()
    self.reset_unload_timer()
    self.loading_lock.set()
def run(self, inputs, is_split=False):
    """Translate `inputs` using this model.

    Args:
        inputs (List[dict[str, str]]): [{"src": "..."},{"src": ...}]
        is_split (bool): forwarded to ``maybe_preprocess``; presumably
            marks pre-split input — confirm with caller.

    Returns:
        list: final translations, one per best hypothesis, with each
        input's leading/trailing whitespace restored.

    Raises:
        ServerModelError: on load timeout or translation failure.
    """
    self.stop_unload_timer()
    timer = Timer()
    timer.start()

    self.logger.info("Running translation using %s" % self.model_id)

    # Wait if another thread is currently loading this model.
    if not self.loading_lock.is_set():
        self.logger.info(
            "Model #%s is being loaded by another thread, waiting"
            % self.model_id)
        if not self.loading_lock.wait(timeout=30):
            raise ServerModelError("Model %s loading timeout"
                                   % self.model_id)
    else:
        if not self.loaded:
            self.load()
            timer.tick(name="load")
        elif self.opt.cuda:
            self.to_gpu()
            timer.tick(name="to_gpu")

    texts = []
    head_spaces = []
    tail_spaces = []
    sentence_objs = []
    for i, inp in enumerate(inputs):
        src = inp['src']
        if src.strip() == "":
            # Pure-whitespace input: keep it verbatim, translate nothing.
            head_spaces.append(src)
            texts.append("")
            tail_spaces.append("")
        else:
            # Preserve surrounding whitespace so it can be restored later.
            whitespaces_before, whitespaces_after = "", ""
            match_before = re.search(r'^\s+', src)
            match_after = re.search(r'\s+$', src)
            if match_before is not None:
                whitespaces_before = match_before.group(0)
            if match_after is not None:
                whitespaces_after = match_after.group(0)
            head_spaces.append(whitespaces_before)
            tail_spaces.append(whitespaces_after)
            sent_obj = self.maybe_preprocess(src.strip(), is_split)
            sentence_objs.append(sent_obj)
            tok = self.maybe_tokenize(sent_obj.tokenized_list)
            texts.extend(tok)

    empty_indices = [i for i, x in enumerate(texts) if x == ""]
    texts_to_translate = [x for x in texts if x != ""]
    self.logger.debug(f'text after preprocess: {texts_to_translate}')

    scores = []
    predictions = []
    if len(texts_to_translate) > 0:
        try:
            scores, predictions = self.translator.translate(
                texts_to_translate, None, '', 1, 'sent', False, False)
        # FIX: was `except (RuntimeError, Exception)` — Exception already
        # subsumes RuntimeError, so the tuple was redundant.
        except Exception as e:
            err = "Error: %s" % str(e)
            self.logger.error(err)
            self.logger.error("repr(text_to_translate): "
                              + repr(texts_to_translate))
            self.logger.error("model: #%s" % self.model_id)
            self.logger.error("model opt: " + str(self.opt.__dict__))
            self.logger.error(traceback.format_exc())
            raise ServerModelError(err)

    timer.tick(name="translation")
    self.logger.info(
        """Using model [%s], input num [%d], translation time: [%f]""" %
        (self.model_id, len(texts), timer.times['translation']))
    self.reset_unload_timer()

    # NOTE: translator returns lists of `n_best` list
    def flatten_list(_list):
        return sum(_list, [])
    results = flatten_list(predictions)
    self.logger.debug(f'text after translate: {results}')
    scores = [score_tensor.item()
              for score_tensor in flatten_list(scores)]

    source_lines = [
        line for obj in sentence_objs for line in obj.get_sentence_list()
    ]
    final_result = [
        self.maybe_postprocess(target, source)
        for target, source in zip(results, source_lines)
    ]
    self.logger.debug(f'text after postprocess: {final_result}')
    final_result = self.__get_final_result(final_result, sentence_objs)
    final_result = self.postprocess_after_merge(final_result)

    # Build back results with placeholders for the empty inputs.
    # FIX: removed `aligns = aligns[:j] + [None] * ... + aligns[j:]` —
    # `aligns` was never defined anywhere in this method, so any
    # empty/whitespace input raised NameError here.
    for i in empty_indices:
        j = i * self.opt.n_best
        results = results[:j] + [""] * self.opt.n_best + results[j:]
        scores = scores[:j] + [0] * self.opt.n_best + scores[j:]

    # Repeat each input's whitespace once per n_best hypothesis.
    head_spaces = [h for h in head_spaces for i in range(self.opt.n_best)]
    tail_spaces = [h for h in tail_spaces for i in range(self.opt.n_best)]
    final_result = [
        "".join(items)
        for items in zip(head_spaces, final_result, tail_spaces)
    ]

    self.logger.info("Translation Results: %d", len(final_result))

    return final_result