def _add_tensor(self, step_num, worker, tensor_object: TensorLocation):
    """Record a tensor location (or a reduction of a tensor) for a step.

    If the tensor name carries the reductions prefix, the name is unpacked
    into (original name, reduction op, abs flag) and registered as a
    reduction step; otherwise it is registered as a plain step. Afterwards
    the step/worker/mode bookkeeping maps are updated.

    Fix: the unpacked abs flag was previously bound to a local named
    ``abs``, shadowing the builtin ``abs()``; renamed to ``abs_flag``.

    :param step_num: global step number this tensor value belongs to
    :param worker: worker name that produced the value
    :param tensor_object: location record; its ``tensorname`` is rewritten
        to the original name when it is a reduction
    """
    is_reduction = False
    if REDUCTIONS_PREFIX in tensor_object.tensorname:
        # Reduction tensor names encode (original name, reduction op,
        # abs flag); unpack and restore the plain tensor name.
        tname, red_name, abs_flag = reverse_reduction_tensor_name(tensor_object.tensorname)
        tensor_object.tensorname = tname
        is_reduction = True
    else:
        tname = tensor_object.tensorname

    # Lazily create the Tensor wrapper on first sight of this name.
    if tname not in self._tensors:
        self._tensors[tname] = Tensor(tname, trial=self, cache=self.cache)
    tensor = self._tensors[tname]

    if is_reduction:
        tensor.add_reduction_step(
            tensor_object.mode,
            tensor_object.mode_step,
            worker,
            red_name,
            abs_flag,
            tensor_object,
        )
    else:
        tensor.add_step(tensor_object.mode, tensor_object.mode_step, worker, tensor_object)

    self._populate_step_dict(tensor_object, step_num)
    self._populate_global_step_to_tensor_name_map(tensor_object, step_num)
    self._populate_workers_for_global_step(step_num, worker)
    self._populate_mode_to_tensor_name_map(tensor_object)
def _update_tensors_from_json(
    self, index_tensors_dict, step, response: bytes, path, worker
) -> Dict[str, Dict[int, Dict[str, TensorLocation]]]:
    """Return a triply nested dict referring to tensor data.

    Example:
    {
        'dense/bias:0': {
            0: {
                'tensor_location': <TensorLocation object>
            },
            2: { ... },
            ...
        },
        'conv2d/kernel:0': { ... },
        ...
    }
    """
    try:
        index_dict = json.loads(response)
    except ValueError:
        raise IndexReaderException("Empty/Corrupt Index File")
    IndexReader._validate(index_dict)

    meta = index_dict["meta"]
    mode = ModeKeys[meta["mode"].strip()]
    mode_step = meta["mode_step"]
    event_file_name = os.path.join(path, meta["event_file_name"])

    for payload in index_dict["tensor_payload"]:
        name = payload["tensorname"]
        location = TensorLocation(
            name,
            mode,
            mode_step,
            event_file_name,
            payload["start_idx"],
            payload["length"],
            worker,
        )
        # setdefault collapses the three-way create/extend/overwrite
        # branching into a single insertion path.
        per_step = index_tensors_dict.setdefault(name, {})
        per_step.setdefault(step, {})[worker] = {"tensor_location": location}

    return index_tensors_dict
def run(self):
    """Consume queued events forever, writing each one (plus an index
    entry when present) until the sentinel event is received."""
    while True:
        queued = self._queue.get()
        # write_summary_with_index enqueues EventWithIndex wrappers;
        # anything else on the queue is a bare event.
        event = queued.event if isinstance(queued, EventWithIndex) else queued
        if event is self._sentinel_event:
            self._queue.task_done()
            break
        try:
            # Write the event; positions = (start offset, length) within
            # the event file, used to build the index record below.
            positions = self._ev_writer.write_event(event)
            if isinstance(queued, EventWithIndex):
                eventfile = get_relative_event_file_path(self._ev_writer.name())
                self._ev_writer.index_writer.add_index(
                    TensorLocation(
                        tname=queued.tensorname,
                        mode=queued.get_mode(),
                        mode_step=queued.mode_step,
                        event_file_name=eventfile,
                        start_idx=positions[0],
                        length=positions[1],
                        worker=parse_worker_name_from_file(eventfile),
                    )
                )
            # Periodic flush: at most once per _flush_secs.
            now = time.time()
            if now > self._next_event_flush_time:
                self._ev_writer.flush()
                self._next_event_flush_time = now + self._flush_secs
        finally:
            # Mark the item done even if writing raised, so join() on the
            # queue cannot deadlock.
            self._queue.task_done()
def _update_tensors_from_json(
    self, index_tensors_dict, step, response: bytes, path, worker
) -> Dict[str, Dict[int, Dict[str, TensorLocation]]]:
    """Return a triply nested dict referring to tensor data.

    Example:
    {
        'dense/bias:0': {
            0: {
                'tensor_location': <TensorLocation object>
            },
            2: { ... },
            ...
        },
        'conv2d/kernel:0': { ... },
        ...
    }
    """
    try:
        index_dict = json.loads(response)
    except ValueError:
        raise IndexReaderException("Empty/Corrupt Index File")
    IndexReader._validate(index_dict)

    meta = index_dict["meta"]
    mode = ModeKeys[meta["mode"].strip()]
    mode_step = meta["mode_step"]

    # Collect (tensor_name, payload_dict) pairs from both payload kinds,
    # building the per-worker dict up front instead of dispatching on the
    # object's type in a second pass.
    entries = []
    if "tensor_payload" in index_dict and len(index_dict["tensor_payload"]):
        event_file_name = os.path.join(path, meta["event_file_name"])
        for item in index_dict["tensor_payload"]:
            name = item["tensorname"]
            location = TensorLocation(
                name, mode, mode_step, event_file_name, item["start_idx"], item["length"], worker
            )
            entries.append((name, {"tensor_location": location}))

    if "shape_payload" in index_dict and len(index_dict["shape_payload"]):
        for item in index_dict["shape_payload"]:
            name = item["tensorname"]
            shape = TensorShape(name, mode, mode_step, item["shape"], item["originalname"])
            entries.append((name, {"tensor_shape": shape}))

    for name, obj_dict in entries:
        index_tensors_dict.setdefault(name, {}).setdefault(step, {})[worker] = obj_dict

    return index_tensors_dict