def loadConfig(filepath: str = "") -> ConfigObj:
    """Get configuration information

    Loads the configuration file at filepath, falling back to pure defaults
    (from the default configspec) when no path is given or the path does not
    exist. The loaded configuration is validated against the configspec.

    Parameters
    ----------
    filepath : str, optional
        The path to the configuration file

    Returns
    -------
    config : ConfigObj
        ConfigObj with global configuration parameters
    """
    configFile = getDefaultConfigFilepath()
    if filepath == "" or not checkFilepath(filepath):
        # no usable user file: build config purely from the default configspec
        config = ConfigObj(configspec=configFile)
    else:
        config = ConfigObj(filepath, configspec=configFile)
        generalPrint("loadConfig", "Loading configuration file {:s}".format(filepath))
    validator = Validator()
    result = config.validate(validator)
    if not result:
        # fix: error source previously said "loadConfigFile", inconsistent with
        # this function's name
        errorPrint("loadConfig", "Config file validation failed", quitRun=True)
    return config
def projectText(infoStr: str) -> None:
    """Print an informational message, tagged with the calling function's name

    Parameters
    ----------
    infoStr : str
        The string to print to the console
    """
    # name of the function one frame up the call stack
    callerName = inspect.stack()[1][3]
    generalPrint("{} Info".format(callerName), infoStr)
def printText(self, infoStr: str) -> None:
    """Print an informational message, tagged with the instance's class name

    Parameters
    ----------
    infoStr : str
        The string to print to the console
    """
    sourceTag = "{} Info".format(type(self).__name__)
    generalPrint(sourceTag, infoStr)
def checkFilepath(path: str) -> bool:
    """Check if file exists

    TODO: Should check that it is actually a file (currently any existing
    path, including a directory, passes)

    Parameters
    ----------
    path : str
        Filepath to check

    Returns
    -------
    out : bool
        True if file exists
    """
    if os.path.exists(path):
        return True
    # path missing: report it and signal failure
    generalPrint("utilsio::checkFilepath", "File path {} could not be found.".format(path))
    return False
def downsampleData(data: Dict[str, np.ndarray], downsampleFactor: int) -> Dict[str, np.ndarray]:
    """Decimate array data

    Channels are decimated in place in the supplied dictionary, which is
    also returned.

    Parameters
    ----------
    data : Dict
        Dictionary with channel as keys and data as values
    downsampleFactor : int
        The factor to downsample the data by

    Returns
    -------
    data : Dict
        Dictionary with channel as keys and data as values
    """
    # factor 1 means no decimation at all
    if downsampleFactor == 1:
        return data
    # scipy decimation is unreliable for large factors, so factors above 13
    # are factorised and applied as a cascade of smaller decimations
    if downsampleFactor > 13:
        downsamples = factorise(downsampleFactor)
        generalPrint(
            "Decimation",
            "Downsample factor {} greater than 13. Downsampling will be performed in multiple steps of {}".format(
                downsampleFactor, arrayToStringInt(downsamples)
            ),
        )
    else:
        downsamples = [downsampleFactor]
    # apply each decimation stage to every channel
    for factor in downsamples:
        for chan in data:
            data[chan] = signal.decimate(data[chan], factor, zero_phase=True)
    return data
def parseKeywords(default: Dict[str, Any], keywords: Dict[str, Any], printkw: bool = True):
    """Overwrite default parameter values with user-supplied keywords

    Only keys already present in the defaults are taken from keywords;
    unknown keywords are ignored. The defaults dictionary is updated in
    place and returned.

    Parameters
    ----------
    default : Dict[str, Any]
        Dictionary of default parameters
    keywords : Dict[str, Any]
        Dictionary of optional keywords
    printkw : bool
        Print out the keywords
    """
    for key, value in keywords.items():
        if key in default:
            default[key] = value
    if printkw:
        generalPrint(
            "{}::utilsCheck::parseKeywords".format(inspect.stack()[1][3]), str(default))
    return default
def measB423EHeaders(
    datapath: str,
    sampleFreq: float,
    ex: str = "E1",
    ey: str = "E2",
    dx: float = 1,
    dy: float = 1,
) -> None:
    """Read a single B423E measurement directory and construct headers

    Reads every *.B423 file in the directory, checks the recordings form one
    continuous dataset (exits via errorPrint if not), then writes out global
    and channel headers for the four electric channels.

    Parameters
    ----------
    datapath : str
        The path to the measurement directory (fix: docstring previously
        documented a non-existent parameter "site")
    sampleFreq : float
        The sampling frequency of the data
    ex : str, optional
        The channel E1, E2, E3 or E4 in the data that represents Ex. Default E1.
    ey : str, optional
        The channel E1, E2, E3 or E4 in the data that represents Ey. Default E2.
    dx : float, optional
        Distance between x electrodes
    dy : float, optional
        Distance between y electrodes
    """
    from resistics.utilities.utilsPrint import generalPrint, warningPrint, errorPrint
    from resistics.ioHandlers.dataWriter import DataWriter

    dataFiles = glob.glob(os.path.join(datapath, "*.B423"))
    dataFilenames = [os.path.basename(dFile) for dFile in dataFiles]
    starts = []
    stops = []
    cumSamples = 0
    # read time range and sample count from each data file
    # (fix: removed stray debug `print(dataHeaders)` and unused enumerate index)
    for dFile in dataFiles:
        generalPrint("constructB423EHeaders", "Reading data file {}".format(dFile))
        dataHeaders, firstDatetime, lastDatetime, numSamples = readB423Params(
            dFile, sampleFreq, 1024, 26)
        generalPrint(
            "constructB423EHeaders",
            "start time = {}, end time = {}".format(firstDatetime, lastDatetime),
        )
        generalPrint("constructB423EHeaders", "number of samples = {}".format(numSamples))
        cumSamples += numSamples
        starts.append(firstDatetime)
        stops.append(lastDatetime)
    # now need to search for any missing data
    sampleTime = timedelta(seconds=1.0 / sampleFreq)
    # sort by start times
    sortIndices = sorted(list(range(len(starts))), key=lambda k: starts[k])
    check = True
    for i in range(1, len(dataFiles)):
        # get the stop time of the previous dataset; consecutive files must
        # continue exactly one sample after the previous file ends
        stopTimePrev = stops[sortIndices[i - 1]]
        startTimeNow = starts[sortIndices[i]]
        if startTimeNow != stopTimePrev + sampleTime:
            warningPrint("constructB423EHeaders", "There is a gap between the datafiles")
            warningPrint(
                "constructB423EHeaders",
                "Please separate out datasets with gaps into separate folders",
            )
            warningPrint("constructB423EHeaders", "Gap found between datafiles:")
            warningPrint("constructB423EHeaders", "1. {}".format(dataFiles[sortIndices[i - 1]]))
            warningPrint("constructB423EHeaders", "2. {}".format(dataFiles[sortIndices[i]]))
            check = False
    # if did not pass check, then exit
    if not check:
        errorPrint(
            "constructB423EHeaders",
            "All data for a single recording must be continuous.",
            quitRun=True,
        )
    # time of first and last sample
    datetimeStart = starts[sortIndices[0]]
    datetimeStop = stops[sortIndices[-1]]
    # global headers
    globalHeaders = {
        "sample_freq": sampleFreq,
        "num_samples": cumSamples,
        "start_time": datetimeStart.strftime("%H:%M:%S.%f"),
        "start_date": datetimeStart.strftime("%Y-%m-%d"),
        "stop_time": datetimeStop.strftime("%H:%M:%S.%f"),
        "stop_date": datetimeStop.strftime("%Y-%m-%d"),
        "meas_channels": 4,
    }
    writer = DataWriter()
    globalHeaders = writer.setGlobalHeadersFromKeywords({}, globalHeaders)
    # channel headers: four electric channels, with electrode spacing applied
    # to whichever channels were chosen as Ex and Ey
    channels = ["E1", "E2", "E3", "E4"]
    chanMap = {"E1": 0, "E2": 1, "E3": 2, "E4": 3}
    sensors = {"E1": "0", "E2": "0", "E3": "0", "E4": "0"}
    posX2 = {"E1": 1, "E2": 1, "E3": 1, "E4": 1}
    posY2 = {"E1": 1, "E2": 1, "E3": 1, "E4": 1}
    posX2[ex] = dx
    posY2[ey] = dy
    chanHeaders = []
    for chan in channels:
        # sensor serial number
        cHeader = dict(globalHeaders)
        cHeader["ats_data_file"] = ", ".join(dataFilenames)
        if ex == chan:
            cHeader["channel_type"] = "Ex"
        elif ey == chan:
            cHeader["channel_type"] = "Ey"
        else:
            cHeader["channel_type"] = chan
        cHeader["scaling_applied"] = False
        cHeader["ts_lsb"] = 1
        cHeader["gain_stage1"] = 1
        cHeader["gain_stage2"] = 1
        cHeader["hchopper"] = 0
        cHeader["echopper"] = 0
        cHeader["pos_x1"] = 0
        cHeader["pos_x2"] = posX2[chan]
        cHeader["pos_y1"] = 0
        cHeader["pos_y2"] = posY2[chan]
        cHeader["pos_z1"] = 0
        cHeader["pos_z2"] = 1
        cHeader["sensor_sernum"] = sensors[chan]
        chanHeaders.append(cHeader)
    chanHeaders = writer.setChanHeadersFromKeywords(chanHeaders, {})
    writer.setOutPath(datapath)
    writer.writeHeaders(globalHeaders, channels, chanMap,
                        chanHeaders, rename=False, ext="h423E")