def transform_cube(self,
                   cube: xr.Dataset,
                   gm: GridMapping,
                   cube_config: CubeConfig) -> TransformedCube:
    history = cube.attrs.get('history')
    if isinstance(history, str):
        history = [history]
    elif isinstance(history, (list, tuple)):
        history = list(history)
    else:
        history = []
    history.append(dict(
        program=f'xcube gen2, version {version}',
        cube_config=cube_config.to_dict(),
    ))
    cube = cube.assign_attrs(
        Conventions='CF-1.7',
        history=history,
        date_created=pd.Timestamp.now().isoformat(),
        # TODO: adjust temporal metadata too
        **get_geospatial_attrs(gm)
    )
    if cube_config.metadata:
        self._check_for_self_destruction(cube_config.metadata)
        cube.attrs.update(cube_config.metadata)
    if cube_config.variable_metadata:
        for var_name, metadata in cube_config.variable_metadata.items():
            if var_name in cube.variables and metadata:
                cube[var_name].attrs.update(metadata)
    return cube, gm, cube_config
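# A minimal, self-contained sketch of the history/attrs stamping pattern used
# by transform_cube above, in plain xarray. The 'example step' entry and the
# dataset contents are made up for illustration; get_geospatial_attrs() and
# the xcube GridMapping/CubeConfig types are not reproduced here.
import pandas as pd
import xarray as xr

cube = xr.Dataset(attrs={'history': 'created by an earlier step'})

# Normalize the existing 'history' attribute to a list before appending,
# exactly as transform_cube does.
history = cube.attrs.get('history')
if isinstance(history, str):
    history = [history]
elif isinstance(history, (list, tuple)):
    history = list(history)
else:
    history = []
history.append({'program': 'example step'})

# assign_attrs returns a new Dataset; the input is left untouched.
cube = cube.assign_attrs(
    Conventions='CF-1.7',
    history=history,
    date_created=pd.Timestamp.now().isoformat(),
)
print(cube.attrs['history'])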
def postprocess_final(self, data: xr.Dataset, scan_desc: dict = None):
    if scan_desc is None:
        scan_desc = {}

    original_filename = scan_desc.get('path', scan_desc.get('file'))
    # Matches names like 'sample_003.pxt' and captures 'sample_003'.
    internal_match = re.match(
        r'([\w+]+_[0-9]{3})\.pxt', Path(original_filename).name)

    # The per-frame analog-input logs live next to each .pxt file.
    all_filenames = find_kaindl_files_associated(Path(original_filename))
    all_filenames = [f.parent / f'{f.stem}_AI.txt' for f in all_filenames]

    def load_attr_for_frame(filename, attr_name):
        df = pd.read_csv(filename, sep='\t', skiprows=6)
        return np.mean(df[attr_name])

    def attach_attr(data, attr_name, as_name):
        # One scalar per frame, averaged over that frame's log.
        frame_values = np.array(
            [load_attr_for_frame(f, attr_name) for f in all_filenames])

        if len(frame_values) == 1:
            data[as_name] = frame_values[0]
        else:
            # Fold the per-frame scalars back onto the scan axes, i.e. every
            # dimension except the spectrometer axes ('eV', 'phi').
            non_spectrometer_dims = [
                d for d in data.spectrum.dims if d not in {'eV', 'phi'}
            ]
            non_spectrometer_coords = {
                c: v for c, v in data.spectrum.coords.items()
                if c in non_spectrometer_dims
            }
            new_shape = [len(data.coords[d]) for d in non_spectrometer_dims]
            frame_arr = xr.DataArray(
                frame_values.reshape(new_shape),
                coords=non_spectrometer_coords,
                dims=non_spectrometer_dims)
            data = xr.merge([data, xr.Dataset({as_name: frame_arr})])

        return data

    try:
        data = attach_attr(data, 'Photocurrent', 'photocurrent')
        data = attach_attr(data, 'Temperature B', 'temp')
        data = attach_attr(data, 'Temperature A', 'cryotip_temp')
    except FileNotFoundError as e:
        print(e)

    if internal_match is not None:
        attrs_path = str(
            Path(original_filename).parent /
            '{}_AI.txt'.format(internal_match.groups()[0]))
        try:
            extra = pd.read_csv(attrs_path, sep='\t', skiprows=6)
            data = data.assign_attrs(extra=extra.to_json())
        except Exception:
            # Best effort only; the extra attributes are optional.
            pass

    # Convert angular coordinates and attributes from degrees to radians.
    deg_to_rad_coords = {'theta', 'beta', 'phi'}
    for c in deg_to_rad_coords:
        if c in data.dims:
            data.coords[c] = data.coords[c] * np.pi / 180

    deg_to_rad_attrs = {'theta', 'beta', 'alpha', 'chi'}
    for angle_attr in deg_to_rad_attrs:
        if angle_attr in data.attrs:
            data.attrs[angle_attr] = (
                float(data.attrs[angle_attr]) * np.pi / 180)

    # No spatial (x, y, z) positions are available here; mark them as NaN.
    for arr in [data] + data.S.spectra:
        arr.coords['x'] = np.nan
        arr.coords['y'] = np.nan
        arr.coords['z'] = np.nan

    return super().postprocess_final(data, scan_desc)
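# A minimal, runnable sketch of the reshape-and-merge step inside attach_attr
# above: one scalar per frame is folded back onto the scan axes that are not
# spectrometer axes ('eV', 'phi'). The 'delay' axis, the variable name
# 'photocurrent', and all values here are made up for illustration.
import numpy as np
import xarray as xr

spectrum = xr.DataArray(
    np.zeros((3, 4, 5)),
    dims=('delay', 'eV', 'phi'),
    coords={'delay': [0.0, 0.5, 1.0]})
data = xr.Dataset({'spectrum': spectrum})

per_frame = np.array([0.1, 0.2, 0.3])  # one value per 'delay' frame
scan_dims = [d for d in data.spectrum.dims if d not in {'eV', 'phi'}]
arr = xr.DataArray(
    per_frame.reshape([data.sizes[d] for d in scan_dims]),
    dims=scan_dims,
    coords={d: data.coords[d] for d in scan_dims})
data = xr.merge([data, xr.Dataset({'photocurrent': arr})])
print(data.photocurrent)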
def _preprocess_dataset(cls, ds: xr.Dataset) -> xr.Dataset:
    return ds.assign_attrs(marker=True)
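# A tiny, self-contained demonstration of the hook above. The _Demo class is
# hypothetical and stands in for whatever class defines the method; the
# 'marker' attribute simply lets downstream code (e.g. a test) check that
# the preprocessing hook actually ran.
import xarray as xr

class _Demo:
    @classmethod
    def _preprocess_dataset(cls, ds: xr.Dataset) -> xr.Dataset:
        # assign_attrs returns a copy with updated attrs; input is unchanged.
        return ds.assign_attrs(marker=True)

ds = _Demo._preprocess_dataset(xr.Dataset())
assert ds.attrs['marker'] is True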
def postprocess(self, frame: xr.Dataset):
    # Importing registers the .S accessor used below.
    import arpes.xarray_extensions  # pylint: disable=unused-import, redefined-outer-name

    frame = super().postprocess(frame)
    # Promote the spectrum's attrs onto the frame itself.
    return frame.assign_attrs(frame.S.spectrum.attrs)
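# A generic analogue of the attrs promotion above, without the arpes .S
# accessor: copy a chosen DataArray's attrs onto its containing Dataset.
# The variable name 'spectrum' and the 'hv' attribute are illustrative only.
import xarray as xr

frame = xr.Dataset(
    {'spectrum': xr.DataArray([1.0, 2.0], dims='eV', attrs={'hv': 21.2})})
frame = frame.assign_attrs(frame['spectrum'].attrs)
assert frame.attrs['hv'] == 21.2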