def write_metadata(self, data_set, io_manager=None, location=None, read_first=False):
    """
    Write all metadata in this DataSet to storage.

    Args:
        data_set (DataSet): the data we're storing
        io_manager (io_manager): the base location to write to
        location (str): the file location within io_manager
        read_first (bool, optional): read previously saved metadata before
            writing? The current metadata will still be used if there
            are changes, but if the saved metadata has information not
            present in the current metadata, it will be retained.
            Default False.

    Raises:
        Exception: if ``io_manager`` or ``location`` is not provided;
            both are needed to locate the output file.
    """
    # Both arguments are required to build the target path; fail early.
    # (This check also keeps the linter happy about unused defaults.)
    if io_manager is None or location is None:
        raise Exception('please set io_manager and location arguments ')

    if read_first:
        # In case the saved file has more metadata than we have here,
        # read it in first. But any changes to the in-memory copy should
        # override the saved file data.
        memory_metadata = data_set.metadata
        data_set.metadata = {}
        self.read_metadata(data_set)
        deep_update(data_set.metadata, memory_metadata)

    fn = io_manager.join(location, self.metadata_file)
    with io_manager.open(fn, 'w', encoding='utf8') as snap_file:
        json.dump(data_set.metadata, snap_file, sort_keys=True,
                  indent=4, ensure_ascii=False, cls=NumpyJSONEncoder)
def add_metadata(self, new_metadata):
    """
    Update DataSet.metadata with additional data.

    Delegates to ``deep_update``, so (presumably) nested dictionaries
    in ``new_metadata`` are merged into existing nested entries rather
    than replacing them wholesale — see ``deep_update`` to confirm.

    Args:
        new_metadata (dict): new data to be deep updated into
            the existing metadata
    """
    deep_update(self.metadata, new_metadata)
def write_metadata(
        self,
        data_set: "qcodes.data.data_set.DataSet",
        io_manager=None,
        location=None,
        read_first=False,
        **kwargs,
):
    """
    Write all metadata in this DataSet to storage.

    Args:
        data_set: the data we're storing
        io_manager (io_manager): the base location to write to
        location (str): the file location within io_manager
        read_first (Optional[bool]): read previously saved metadata before
            writing? The current metadata will still be used if there
            are changes, but if the saved metadata has information not
            present in the current metadata, it will be retained.
            Default False.
        kwargs (dict): From the dictionary the key sort_keys is
            extracted (default value: False). If True, then the keys
            of the metadata will be stored sorted in the json file.
            Note: sorting is only possible if the keys of the metadata
            dictionary can be compared.

    Raises:
        Exception: if ``io_manager`` or ``location`` is not provided;
            both are needed to locate the output file.
    """
    sort_keys = kwargs.get('sort_keys', False)

    # Both arguments are required to build the target path; fail early.
    # (This check also keeps the linter happy about unused defaults.)
    if io_manager is None or location is None:
        raise Exception('please set io_manager and location arguments ')

    if read_first:
        # In case the saved file has more metadata than we have here,
        # read it in first. But any changes to the in-memory copy should
        # override the saved file data.
        memory_metadata = data_set.metadata
        data_set.metadata = {}
        self.read_metadata(data_set)
        deep_update(data_set.metadata, memory_metadata)

    fn = io_manager.join(location, self.metadata_file)
    with io_manager.open(fn, 'w', encoding='utf8') as snap_file:
        json.dump(data_set.metadata, snap_file, sort_keys=sort_keys,
                  indent=4, ensure_ascii=False, cls=NumpyJSONEncoder)
def write_metadata(self, data_set: "qcodes.data.data_set.DataSet",
                   io_manager=None, location=None, read_first=False,
                   **kwargs):
    """
    Write all metadata in this DataSet to storage.

    Args:
        data_set: the data we're storing
        io_manager (io_manager): the base location to write to
        location (str): the file location within io_manager
        read_first (Optional[bool]): read previously saved metadata before
            writing? The current metadata will still be used if there
            are changes, but if the saved metadata has information not
            present in the current metadata, it will be retained.
            Default False.

    Raises:
        Exception: if ``io_manager`` or ``location`` is not provided;
            both are needed to locate the output file.
    """
    # Both arguments are required to build the target path; fail early.
    # (This check also keeps the linter happy about unused defaults.)
    if io_manager is None or location is None:
        raise Exception('please set io_manager and location arguments ')

    if read_first:
        # In case the saved file has more metadata than we have here,
        # read it in first. But any changes to the in-memory copy should
        # override the saved file data.
        memory_metadata = data_set.metadata
        data_set.metadata = {}
        self.read_metadata(data_set)
        deep_update(data_set.metadata, memory_metadata)

    # Lazy %-style args: the message is only formatted if this log
    # level is actually enabled.
    log.info('writing metadata to file %s', self._metadata_file)
    fn = io_manager.join(location, self._metadata_file)
    with io_manager.open(fn, 'w', encoding='utf8') as snap_file:
        hickle.dump(data_set.metadata, snap_file)
def write_metadata(self, data_set: 'DataSet', io_manager, location,
                   read_first=True, **kwargs):
    """
    Write all metadata in this DataSet to storage.

    Args:
        data_set: the data we're storing
        io_manager (io_manager): the base location to write to
        location (str): the file location within io_manager
        read_first (Optional[bool]): read previously saved metadata before
            writing? The current metadata will still be used if there
            are changes, but if the saved metadata has information not
            present in the current metadata, it will be retained.
            Default True.
    """
    if read_first:
        # Merge with anything already on disk: load the saved copy
        # first, then layer the in-memory metadata on top so current
        # changes win while extra saved keys are preserved.
        current = data_set.metadata
        data_set.metadata = {}
        self.read_metadata(data_set)
        deep_update(data_set.metadata, current)

    target = io_manager.join(location, self.metadata_file)
    with io_manager.open(target, 'w', encoding='utf8') as fh:
        json.dump(
            data_set.metadata,
            fh,
            sort_keys=False,
            indent=4,
            ensure_ascii=False,
            cls=NumpyJSONEncoder,
        )