def _make_config(self, data=None, key=None, val=None, obj=None):
    """Build a ConfigRecord protobuf from a dict and/or one key/value pair.

    Args:
        data: Optional dict of config key -> value; each entry becomes an
            update item with its value serialized to JSON.
        key: Optional key for a single extra update; a str for a top-level
            key or a tuple of strs for a nested key path.
        val: Value paired with `key`; serialized to JSON.
        obj: Optional existing ConfigRecord to append updates to; a new
            record is created when omitted.

    Returns:
        The populated ConfigRecord.
    """
    config = obj or wandb_internal_pb2.ConfigRecord()
    if data:
        for k, v in six.iteritems(data):
            update = config.update.add()
            update.key = k
            update.value_json = json_dumps_safer(json_friendly(v)[0])
    if key:
        update = config.update.add()
        if isinstance(key, tuple):
            # BUG FIX: a repeated proto field cannot be assigned directly
            # (`update.nested_key = key` raises AttributeError); extend it
            # with the key-path elements, matching the other builders.
            update.nested_key.extend(key)
        else:
            update.key = key
        update.value_json = json_dumps_safer(json_friendly(val)[0])
    return config
def _encode(self, value, path_from_root):
    """Normalize, compress, and encode sub-objects for backend storage.

    value: Object to encode.
    path_from_root: `tuple` of key strings from the top-level summary
        to the current `value`.

    Returns:
        A new tree of dict's with large objects replaced with dictionaries
        with "_type" entries that say which type the original data was.
    """
    # Recurse into dicts, rebuilding each subtree with encoded children
    # and dropping/encoding anything that is not JSON serializable.
    if isinstance(value, dict):
        return {
            k: self._encode(v, path_from_root + (k,))
            for k, v in six.iteritems(value)
        }
    # Leaf value: convert to a JSON-friendly form, then compress if large.
    dotted_path = ".".join(path_from_root)
    friendly, _ = util.json_friendly(
        data_types.val_to_json(self._run, dotted_path, value))
    encoded, was_compressed = util.maybe_compress_summary(
        friendly, util.get_h5_typename(value))
    if was_compressed:
        # Compressed values keep only a stub in JSON; the full payload
        # goes to the h5 store.
        self.write_h5(path_from_root, friendly)
    return encoded
def _make_summary(self, summary_record: sr.SummaryRecord):
    """Translate a SummaryRecord into its protobuf equivalent."""
    pb_record = wandb_internal_pb2.SummaryRecord()

    def _set_item_key(pb_item, key):
        # Multi-part keys go into the repeated nested_key field;
        # single-part keys into the scalar key field.
        assert len(key) > 0
        if len(key) > 1:
            pb_item.nested_key.extend(key)
        else:
            pb_item.key = key[0]

    for update in summary_record.update:
        pb_item = pb_record.update.add()
        _set_item_key(pb_item, update.key)
        encoded = self._summary_encode(update.value, ".".join(update.key))
        friendly, _ = json_friendly(encoded)
        pb_item.value_json = json.dumps(
            friendly,
            cls=WandBJSONEncoderOld,
        )

    for removal in summary_record.remove:
        _set_item_key(pb_record.remove.add(), removal.key)

    return pb_record
def _summary_encode(self, value: t.Any, path_from_root: str):
    """Normalize, compress, and encode sub-objects for backend storage.

    value: Object to encode.
    path_from_root: `str` dot separated string from the top-level summary
        to the current `value`.

    Returns:
        A new tree of dict's with large objects replaced with dictionaries
        with "_type" entries that say which type the original data was.
    """
    # Dicts are rebuilt recursively, encoding each child in turn and
    # discarding anything not JSON serializable.
    if isinstance(value, dict):
        return {
            k: self._summary_encode(v, path_from_root + "." + k)
            for k, v in six.iteritems(value)
        }
    friendly, _ = json_friendly(
        data_types.val_to_json(
            self._run, path_from_root, value, namespace="summary"))
    encoded, was_compressed = maybe_compress_summary(
        friendly, get_h5_typename(value))
    if was_compressed:
        # TODO(jhr): implement h5 write-out for compressed values.
        pass
        # self.write_h5(path_from_root, friendly_value)
    return encoded
def _make_config(self, config_dict, obj=None):
    """Populate a ConfigRecord protobuf from a plain config dict.

    config_dict: mapping of config key -> value; values are serialized
        to JSON after being made JSON-friendly.
    obj: optional existing ConfigRecord to append to; a fresh record is
        created when omitted.
    """
    record = obj or wandb_internal_pb2.ConfigRecord()
    for name, value in six.iteritems(config_dict):
        item = record.update.add()
        item.key = name
        item.value_json = json_dumps_safer(json_friendly(value)[0])
    return record
def _make_stats(self, stats_dict):
    """Build a SYSTEM-type StatsRecord stamped with the current time."""
    record = wandb_internal_pb2.StatsRecord()
    record.stats_type = wandb_internal_pb2.StatsRecord.StatsType.SYSTEM
    record.timestamp.GetCurrentTime()
    for name, value in six.iteritems(stats_dict):
        entry = record.item.add()
        entry.key = name
        entry.value_json = json_dumps_safer(json_friendly(value)[0])
    return record
def _make_stats(self, stats_dict: dict) -> pb.StatsRecord:
    """Build a SYSTEM-type StatsRecord from a dict of metric values."""
    record = pb.StatsRecord()
    record.stats_type = pb.StatsRecord.StatsType.SYSTEM
    record.timestamp.GetCurrentTime()
    for name, value in six.iteritems(stats_dict):
        entry = record.item.add()
        entry.key = name
        entry.value_json = json_dumps_safer(json_friendly(value)[0])  # type: ignore
    return record
def json_helper(val):
    """Recursively JSON-encode `val`, serializing WBValue objects via the
    enclosing `artifact` (closure variable)."""
    if isinstance(val, WBValue):
        return val.to_json(artifact)
    # NOTE(review): exact-class check preserved as-is — dict subclasses
    # fall through to util.json_friendly below.
    if val.__class__ == dict:
        return {key: json_helper(value) for key, value in val.items()}
    return util.json_friendly(val)[0]
def _make_config(
    self,
    data=None,
    key=None,
    val=None,
    obj=None,
):
    """Assemble a ConfigRecord from a dict and/or one (possibly nested) key.

    data: optional dict of config key -> value updates.
    key: optional str (top-level) or tuple of strs (nested key path).
    val: value paired with `key`.
    obj: optional existing ConfigRecord to append to.
    """
    record = obj or pb.ConfigRecord()
    if data:
        for name, value in six.iteritems(data):
            item = record.update.add()
            item.key = name
            item.value_json = json_dumps_safer(json_friendly(value)[0])  # type: ignore
    if key:
        item = record.update.add()
        if isinstance(key, tuple):
            # Equivalent to appending each path element in turn.
            item.nested_key.extend(key)
        else:
            item.key = key
        item.value_json = json_dumps_safer(json_friendly(val)[0])  # type: ignore
    return record
def _make_config(
    self,
    data: dict = None,
    key: Union[Tuple[str, ...], str] = None,
    val: Any = None,
    obj: pb.ConfigRecord = None,
) -> pb.ConfigRecord:
    """Assemble a ConfigRecord from a dict and/or one (possibly nested) key.

    data: optional dict of config key -> value updates.
    key: optional str (top-level) or tuple of strs (nested key path).
    val: value paired with `key`.
    obj: optional existing ConfigRecord to append to.
    """
    record = obj or pb.ConfigRecord()
    if data:
        for name, value in six.iteritems(data):
            item = record.update.add()
            item.key = name
            item.value_json = json_dumps_safer(json_friendly(value)[0])
    if key:
        item = record.update.add()
        if isinstance(key, tuple):
            # Equivalent to appending each path element in turn.
            item.nested_key.extend(key)
        else:
            item.key = key
        item.value_json = json_dumps_safer(json_friendly(val)[0])
    return record
def _sanitize_val(self, val):
    """Turn all non-builtin values into something safe for YAML."""
    if isinstance(val, dict):
        return {k: self._sanitize_val(v) for k, v in six.iteritems(val)}
    if isinstance(val, slice):
        # Slices are not YAML-safe; expand them into a descriptive dict.
        return dict(
            slice_start=val.start, slice_step=val.step, slice_stop=val.stop)
    val, _ = json_friendly(val)
    if isinstance(val, Sequence) and not isinstance(val, six.string_types):
        return [self._sanitize_val(item) for item in val]
    # Stringify anything whose class is not a plain builtin
    # ("builtins" on py3, "__builtin__" on py2).
    if val.__class__.__module__ not in ("builtins", "__builtin__"):
        val = str(val)
    return val
def json_friendly_test(orig_data, obj):
    """Assert that json_friendly converts `obj` back to `orig_data` and
    reports that a conversion happened."""
    friendly, converted = util.json_friendly(obj)
    utils.assert_deep_lists_equal(orig_data, friendly)
    assert converted