def __internal_getattribute__(self, name: str, data: SakDbFields, graph: SakDbGraph) -> Any:
    """Reconstruct the attribute *name* from its serialized fields.

    The attribute's type is read from the "_{name}:type" marker field:
    "list" and "dict" attributes are reassembled element-by-element from
    fields keyed "{name}:<index-or-key>", every other type from the single
    field keyed "{name}".

    Args:
        name: Attribute name being resolved.
        data: The serialized fields backing this object.
        graph: Graph used to resolve object references during decoding.

    Returns:
        The decoded value (a SakDbList / SakDbDict wrapper for containers).

    Raises:
        Exception: if the type marker or the value field is missing.
    """
    decoder = SakDbDecoder(graph)

    type_field = data.get_by_key(f"_{name}:type")
    if type_field is None:
        # Without the type marker there is no way to decode the payload.
        # (Fixed: the original had an unreachable `return None` after this raise.)
        raise Exception(f"Could not infer the type for {name}.")

    if type_field.payload == "list":
        # List attribute: one field per element, keyed "{name}:<idx>".
        # NOTE(review): elements are appended in data.fields order — presumably
        # that order matches the numeric indices; verify against the writer.
        tmp_list = []
        for field in data.fields:
            if not field.key.startswith(f"{name}:"):
                continue
            value = json.loads(field.payload, object_hook=decoder.object_hook)
            tmp_list.append(value)
        return SakDbList(self, name, tmp_list)
    elif type_field.payload == "dict":
        # Dict attribute: one field per entry, keyed "{name}:<key>".
        tmp_dict: Dict[str, Any] = {}
        for field in data.fields:
            if not field.key.startswith(f"{name}:"):
                continue
            value = json.loads(field.payload, object_hook=decoder.object_hook)
            # The dictionary key is everything after the first ":".
            _, field_key = field.key.split(":", 1)
            tmp_dict[field_key] = value
        return SakDbDict(self, name, tmp_dict)
    else:
        # Scalar / object attribute: a single JSON payload under "{name}".
        object_field = data.get_by_key(name)
        if object_field is None:
            raise Exception(f"No attribute {name} for {self}.")
        value = json.loads(object_field.payload, object_hook=decoder.object_hook)
        return value
def _save(self) -> None:
    """Persist this object's class name under the "_cl" key.

    The write is skipped when the stored class name already matches, so
    repeated saves do not touch the backing store needlessly.
    """
    class_name = type(self).__name__
    payload_fields = SakDbFields(SakDbField(key="_cl", payload=class_name))

    stored = self.namespace.read(self.key, "_cl")
    if stored is not None:
        existing = stored.get_by_key("_cl")
        if existing is not None and existing.payload == class_name:
            # Already recorded with the same class name — nothing to do.
            return

    self.namespace.write(self.key, "_cl", payload_fields)
def set_metadata(self, key: str, value: Any) -> None:
    """Store *value* as a metadata entry named *key*.

    The value is JSON-encoded (compact separators, project encoder for
    custom types) and written under "<name>/metadata/<key>" together with
    a "_type" field recording the value's Python type name.

    Args:
        key: Metadata entry name (also used as the payload field key).
        value: Any JSON-serializable value (via SakDbEncoder).
    """
    # Fixed: local variable was misspelled "metada_path".
    metadata_path = Path(self.name) / "metadata" / key
    encoder = SakDbEncoder()
    payload_str = json.dumps(value, default=encoder.default, separators=(",", ":"))
    data = SakDbFields(
        SakDbField(key="_type", payload=type(value).__name__),
        SakDbField(key=key, payload=payload_str),
    )
    self.write_sakdb(metadata_path, data)
def write(self, path: Path, value: SakDbFields) -> None:
    """Stage *value* as a pending change for *path*.

    Fields whose content is unchanged (same crc as the stored version)
    keep their previous timestamp, so no-op writes do not bump ts.
    """
    stored = self.read(path)
    if stored is not None:
        # Copy the old timestamp onto every incoming field with identical content.
        for old_field in stored.fields:
            incoming = value.get_by_key(old_field.key)
            if incoming is not None and incoming.crc == old_field.crc:
                incoming.ts = old_field.ts
    self.changes[str(path)] = merge(None, value, stored)
def session_apply_sakdb(self, path: Path, value: SakDbFields) -> None:
    """Write *value* to *path* at the storage layer.

    Before serializing, timestamps are sanitized: any field whose content
    (crc) matches what is already stored keeps its previous timestamp.
    """
    existing = self.read_sakdb(path)
    if existing is not None:
        # Preserve the stored timestamp for content-identical fields.
        for stored_field in existing.fields:
            incoming = value.get_by_key(stored_field.key)
            if incoming is not None and incoming.crc == stored_field.crc:
                incoming.ts = stored_field.ts

    # Serialize the sanitized fields and hand them to the low-level writer.
    serialized = sakdb_dumps(value)
    self._write(path, serialized)
def test_dumps() -> None:
    """sakdb_dumps renders a field as a JSON header, separator, then payload."""
    # Given a single field with a fixed timestamp and key.
    field = SakDbField(
        ts=1.0,
        key="0000000000000000000000000000000000000000",
        payload="Hello world",
    )
    fields = SakDbFields(field)

    # When it is serialized.
    serialized = sakdb_dumps(fields)

    # Then the header carries ts/key/crc and the payload follows the separator.
    expected_header = (
        '{"t":1.0,"k":"0000000000000000000000000000000000000000",'
        '"c":"md5:3e25960a79dbc69b674cd4ec67a72c62"}'
    )
    assert serialized == expected_header + PAYLOAD_SEPARATOR + '"Hello world"\n'
def test_loads() -> None:
    """sakdb_loads round-trips what sakdb_dumps produced, preserving key/crc/payload."""
    # Given a serialized single-field container with an explicit crc.
    original = SakDbFields(
        SakDbField(
            ts=1.0,
            key="0000000000000000000000000000000000000000",
            crc="1111111111111111111111111111111111111111",
            payload="Hello world",
        )
    )
    serialized = sakdb_dumps(original)

    # When it is parsed back.
    parsed = sakdb_loads(serialized)

    # Then exactly one field comes back with the original attributes intact.
    assert parsed is not None
    assert len(parsed.fields) == 1
    restored = parsed.fields[0]
    assert restored.key == "0000000000000000000000000000000000000000"
    assert restored.crc == "1111111111111111111111111111111111111111"
    assert restored.payload == "Hello world"
def __setattr__(self, name: str, value: Any) -> None:
    """Set an attribute and mirror public attributes into persistent storage.

    Private names (leading underscore) and the bookkeeping attributes
    "namespace"/"key" are set in memory only.  For everything else the
    value is serialized into the "meta" file: lists and dicts are exploded
    into one field per element (keys "{name}:<idx>" / "{name}:<key>"),
    other values are stored as a single JSON payload under "{name}";
    in every case a "_{name}:type" marker records the value's type.

    Raises:
        Exception: if the owning namespace is not attached to a graph.
    """
    if name.startswith("_") or name in ("namespace", "key"):
        super(SakDbObject, self).__setattr__(name, value)
        return

    # Update the in-memory attribute first.
    super(SakDbObject, self).__setattr__(name, value)

    # Fixed: the original wrapped everything below in
    # `try: ... except Exception as e: raise (e)`, which added nothing and
    # re-raised from the except site, obscuring the traceback origin.
    if self.namespace.graph is None:
        raise Exception(
            f"Namespace {self.namespace.name} must be attached to a graph."
        )

    encoder = SakDbEncoder()

    def _dump(obj: Any) -> str:
        # Compact JSON, using the project encoder for custom types.
        return json.dumps(obj, default=encoder.default, separators=(",", ":"))

    fields = []
    if isinstance(value, list):
        fields.append(SakDbField(key=f"_{name}:type", payload="list"))
        for idx, item in enumerate(value):
            fields.append(SakDbField(key=f"{name}:{idx}", payload=_dump(item)))
    elif isinstance(value, dict):
        fields.append(SakDbField(key=f"_{name}:type", payload="dict"))
        for ikey, item in value.items():
            fields.append(SakDbField(key=f"{name}:{ikey}", payload=_dump(item)))
    else:
        fields.append(
            SakDbField(key=f"_{name}:type", payload=type(value).__name__))
        fields.append(SakDbField(key=name, payload=_dump(value)))

    metadata_file = "meta"
    data = SakDbFields(*fields)

    previous_data = self.namespace.read(self.key, metadata_file)
    if previous_data is not None:
        # Drop the stale entries for this attribute before merging.
        # TODO(witt): Maybe it is not necessary to drop the _{name}:type.
        previous_data.drop_by_key_prefix(f"_{name}:type")
        previous_data.drop_by_key_prefix(f"{name}:")
        new_data = merge(None, data, previous_data)
    else:
        new_data = data

    self.namespace.write(self.key, metadata_file, new_data)