def intdiv(nom: Union[int, float], div: Union[int, float]) -> int:
    """Return an integer result of an exact division.

    The division is expected to be exact and ensures an integer return
    rather than float. Code execution will exit (via errorPrint with
    quitRun=True) if the division is not exact.

    Parameters
    ----------
    nom : int, float
        Nominator
    div : int, float
        Divisor

    Returns
    -------
    out : int
        Result of division
    """
    if nom % div == 0:
        # int() guards the declared return type: for float inputs,
        # e.g. 4.0 // 2.0 == 2.0, floor division yields a float
        return int(nom // div)
    errorPrint(
        "utilsMath::intdiv",
        "intdiv assumes exits upon having a remainder to make sure errors are not propagated through the code",
        quitRun=True,
    )
    return 0
def loadConfig(filepath: str = "") -> ConfigObj:
    """Get configuration information.

    Falls back to the default global configuration when no valid filepath
    is supplied; the loaded configuration is always validated against the
    default configspec.

    Parameters
    ----------
    filepath : str, optional
        The path to the configuration file

    Returns
    -------
    config : ConfigObj
        ConfigObj with global configuration parameters
    """
    specFile = getDefaultConfigFilepath()
    useDefault = filepath == "" or not checkFilepath(filepath)
    if useDefault:
        # no usable user file: build config purely from the default spec
        config = ConfigObj(configspec=specFile)
    else:
        config = ConfigObj(filepath, configspec=specFile)
        generalPrint("loadConfig", "Loading configuration file {:s}".format(filepath))
    # validate against the spec; exit on failure
    if not config.validate(Validator()):
        errorPrint("loadConfigFile", "Config file validation failed", quitRun=True)
    return config
def projectError(errorStr: str, quitRun: bool = False) -> None:
    """Error print to terminal and possibly quit.

    Parameters
    ----------
    errorStr : str
        The string to print to the console
    quitRun : bool, optional (False)
        If True, the code will exit
    """
    # tag the message with the name of the calling function
    callerName = inspect.stack()[1][3]
    errorPrint("{} Warning".format(callerName), errorStr, quitRun=quitRun)
def printError(self, errorStr: str, quitRun: bool = False) -> None:
    """Error print to terminal and possibly quit.

    Parameters
    ----------
    errorStr : str
        The string to print to the console
    quitRun : bool, optional (False)
        If True, the code will exit
    """
    # tag the message with the concrete class name of this instance
    tag = "{} Error".format(self.__class__.__name__)
    errorPrint(tag, errorStr, quitRun=quitRun)
def getDefaultConfigFilepath() -> str:
    """Get the path of the default global configuration file.

    Returns
    -------
    str
        Path to global config file
    """
    # resisticsConfig.ini lives one directory above this module; build the
    # path relative to this file so it resolves regardless of working directory
    path = os.path.split(__file__)[0]
    globalConfigFile = os.path.join(path, "..", "resisticsConfig.ini")
    if not checkFilepath(globalConfigFile):
        # tag matches the function name (was "getDefaultConfig") so the
        # error message can be traced back to its source
        errorPrint(
            "getDefaultConfigFilepath",
            "Default configuration file could not be found",
            quitRun=True,
        )
    return globalConfigFile
def fillGap(timeData1, timeData2):
    """Fill gap between time series.

    Fill gaps between two different recordings. The intent is to fill the
    gap when recording has been interrupted and there are two data files.
    Both time series must have the same sampling frequency.

    Parameters
    ----------
    timeData1 : TimeData
        Time series data
    timeData2 : TimeData
        Time series data

    Returns
    -------
    TimeData
        Time series data with gap filled, or False if the sampling
        frequencies do not match (after printing an error with quitRun=True)
    """
    if timeData1.sampleFreq != timeData2.sampleFreq:
        errorPrint(
            "fillGap",
            "fillGap requires both timeData objects to have the same sample rate",
            quitRun=True,
        )
        return False
    sampleFreq = timeData1.sampleFreq
    sampleRate = 1.0 / sampleFreq
    # order the two recordings in time
    timeDataFirst, timeDataSecond = timeData1, timeData2
    if timeData1.startTime > timeData2.stopTime:
        timeDataFirst, timeDataSecond = timeData2, timeData1
    # start/stop times are inclusive, so the gap runs from one sample after
    # the first recording ends to one sample before the second begins
    gapStart = timeDataFirst.stopTime + timedelta(seconds=sampleRate)
    gapEnd = timeDataSecond.startTime - timedelta(seconds=sampleRate)
    # +1 because gapStart and gapEnd are both inside the gap (inclusive)
    numSamplesGap = int(round((gapEnd - gapStart).total_seconds() * sampleFreq)) + 1
    # linearly interpolate each channel across the gap
    newData = {}
    for chan in timeDataFirst.chans:
        startVal = timeDataFirst.data[chan][-1]
        endVal = timeDataSecond.data[chan][0]
        # linspace includes both endpoints; drop them to keep only the
        # numSamplesGap interior samples. This also fixes an off-by-one in
        # the original increment, delta / (numSamplesGap + 2): there are
        # numSamplesGap + 1 intervals between startVal and endVal, so the
        # original left a double-sized step at the end of the gap.
        fillData = np.linspace(startVal, endVal, numSamplesGap + 2)[1:-1].astype(
            timeDataFirst.data[chan].dtype
        )
        newData[chan] = np.concatenate(
            [timeDataFirst.data[chan], fillData, timeDataSecond.data[chan]]
        )
    # combine the comments of both inputs and record the fill that was applied
    comment = (
        ["-----------------------------", "TimeData1 comments"]
        + timeDataFirst.comments
        + ["-----------------------------", "TimeData2 comments"]
        + timeDataSecond.comments
    )
    comment += ["-----------------------------"] + [
        "Gap filled from {} to {}".format(gapStart, gapEnd)
    ]
    return TimeData(
        sampleFreq=sampleFreq,
        startTime=timeDataFirst.startTime,
        stopTime=timeDataSecond.stopTime,
        data=newData,
        comments=comment,
    )
def getStatElements(stat: str) -> List[str]:
    """Get statistic elements for a statistic.

    Parameters
    ----------
    stat : str
        The statistic for which to get the statistic elements

    Returns
    -------
    List[str]
        The elements (component names) of the statistic. If the statistic
        is unknown, an error is printed (with quitRun=True) and False is
        returned. (The original docstring incorrectly described the return
        as a Dict mapping.)
    """
    # single-station statistic elements
    statElements = {
        "absvalEqn": [
            "absExEx",
            "absHyEx",
            "absExEy",
            "absHyEy",
            "absExHx",
            "absHyHx",
            "absExHy",
            "absHyHy",
            "absEyEx",
            "absHxEx",
            "absEyEy",
            "absHxEy",
            "absEyHx",
            "absHxHx",
            "absEyHy",
            "absHxHy",
        ],
        "coherence": ["cohExHx", "cohExHy", "cohEyHx", "cohEyHy"],
        "powerSpectralDensity": ["psdEx", "psdEy", "psdHx", "psdHy"],
        "polarisationDirection": ["polExEy", "polHxHy"],
        "transferFunction": [
            "ExHxReal",
            "ExHxImag",
            "ExHyReal",
            "ExHyImag",
            "EyHxReal",
            "EyHxImag",
            "EyHyReal",
            "EyHyImag",
        ],
        "resPhase": [
            "ExHxRes",
            "ExHxPhase",
            "ExHyRes",
            "ExHyPhase",
            "EyHxRes",
            "EyHxPhase",
            "EyHyRes",
            "EyHyPhase",
        ],
        "partialCoherence": [
            "bivarEx",
            "bivarEy",
            "parExHx",
            "parExHy",
            "parEyHx",
            "parEyHy",
        ],
    }
    # remote reference stat elements
    statElementsRR = {
        "RR_coherence": [
            "ExHxRR",
            "ExHyRR",
            "EyHxRR",
            "EyHyRR",
            "HxHxRR",
            "HxHyRR",
            "HyHxRR",
            "HyHyRR",
        ],
        "RR_coherenceEqn": ["ExHxR-HyHxR", "ExHyR-HyHyR", "EyHxR-HxHxR", "EyHyR-HxHyR"],
        "RR_absvalEqn": [
            "absHyHxR",
            "absExHxR",
            "absHyHyR",
            "absExHyR",
            "absHxHxR",
            "absEyHxR",
            "absHxHyR",
            "absEyHyR",
        ],
        "RR_transferFunction": [
            "ExHxRealRR",
            "ExHxImagRR",
            "ExHyRealRR",
            "ExHyImagRR",
            "EyHxRealRR",
            "EyHxImagRR",
            "EyHyRealRR",
            "EyHyImagRR",
        ],
        "RR_resPhase": [
            "ExHxResRR",
            "ExHxPhaseRR",
            "ExHyResRR",
            "ExHyPhaseRR",
            "EyHxResRR",
            "EyHxPhaseRR",
            "EyHyResRR",
            "EyHyPhaseRR",
        ],
    }
    if stat in statElements:
        return statElements[stat]
    if stat in statElementsRR:
        return statElementsRR[stat]
    errorPrint(
        "utilsStats::getStatElements",
        "Statistic {} not found".format(stat),
        quitRun=True,
    )
    return False
def measB423EHeaders(
    datapath: str,
    sampleFreq: float,
    ex: str = "E1",
    ey: str = "E2",
    dx: float = 1,
    dy: float = 1,
) -> None:
    """Read a single B423 measurement directory and construct headers.

    Scans all *.B423 files in datapath, checks that together they form one
    continuous recording, and writes global and channel header files.

    Parameters
    ----------
    datapath : str
        The path to the measurement directory
    sampleFreq : float
        The sampling frequency of the data
    ex : str, optional
        The channel E1, E2, E3 or E4 in the data that represents Ex. Default E1.
    ey : str, optional
        The channel E1, E2, E3 or E4 in the data that represents Ey. Default E2.
    dx : float, optional
        Distance between x electrodes
    dy : float, optional
        Distance between y electrodes
    """
    from resistics.utilities.utilsPrint import generalPrint, warningPrint, errorPrint
    from resistics.ioHandlers.dataWriter import DataWriter

    dataFiles = glob.glob(os.path.join(datapath, "*.B423"))
    dataFilenames = [os.path.basename(dFile) for dFile in dataFiles]
    starts = []
    stops = []
    cumSamples = 0
    # read start/stop times and sample count for every data file
    for idx, dFile in enumerate(dataFiles):
        generalPrint("constructB423EHeaders", "Reading data file {}".format(dFile))
        # 1024-byte header, 26-byte records — presumably the fixed layout of
        # B423 electric-channel files; confirm against the format specification
        dataHeaders, firstDatetime, lastDatetime, numSamples = readB423Params(
            dFile, sampleFreq, 1024, 26
        )
        print(dataHeaders)
        generalPrint(
            "constructB423EHeaders",
            "start time = {}, end time = {}".format(firstDatetime, lastDatetime),
        )
        generalPrint(
            "constructB423EHeaders", "number of samples = {}".format(numSamples)
        )
        cumSamples += numSamples
        starts.append(firstDatetime)
        stops.append(lastDatetime)
    # now need to search for any missing data
    sampleTime = timedelta(seconds=1.0 / sampleFreq)
    # sort file indices by their start times
    sortIndices = sorted(list(range(len(starts))), key=lambda k: starts[k])
    check = True
    for i in range(1, len(dataFiles)):
        # get the stop time of the previous dataset
        stopTimePrev = stops[sortIndices[i - 1]]
        startTimeNow = starts[sortIndices[i]]
        # consecutive files must be exactly one sample period apart
        if startTimeNow != stopTimePrev + sampleTime:
            warningPrint(
                "constructB423EHeaders", "There is a gap between the datafiles"
            )
            warningPrint(
                "constructB423EHeaders",
                "Please separate out datasets with gaps into separate folders",
            )
            warningPrint("constructB423EHeaders", "Gap found between datafiles:")
            warningPrint(
                "constructB423EHeaders", "1. {}".format(dataFiles[sortIndices[i - 1]])
            )
            warningPrint(
                "constructB423EHeaders", "2. {}".format(dataFiles[sortIndices[i]])
            )
            check = False
    # if did not pass check, then exit
    if not check:
        errorPrint(
            "constructB423EHeaders",
            "All data for a single recording must be continuous.",
            quitRun=True,
        )
    # time of first and last sample across the whole (sorted) recording
    datetimeStart = starts[sortIndices[0]]
    datetimeStop = stops[sortIndices[-1]]
    # global headers
    globalHeaders = {
        "sample_freq": sampleFreq,
        "num_samples": cumSamples,
        "start_time": datetimeStart.strftime("%H:%M:%S.%f"),
        "start_date": datetimeStart.strftime("%Y-%m-%d"),
        "stop_time": datetimeStop.strftime("%H:%M:%S.%f"),
        "stop_date": datetimeStop.strftime("%Y-%m-%d"),
        "meas_channels": 4,
    }
    writer = DataWriter()
    globalHeaders = writer.setGlobalHeadersFromKeywords({}, globalHeaders)
    # channel headers: four electric channels with default unit geometry
    channels = ["E1", "E2", "E3", "E4"]
    chanMap = {"E1": 0, "E2": 1, "E3": 2, "E4": 3}
    sensors = {"E1": "0", "E2": "0", "E3": "0", "E4": "0"}
    posX2 = {"E1": 1, "E2": 1, "E3": 1, "E4": 1}
    posY2 = {"E1": 1, "E2": 1, "E3": 1, "E4": 1}
    # electrode separations apply only to the channels chosen as Ex and Ey
    posX2[ex] = dx
    posY2[ey] = dy
    chanHeaders = []
    for chan in channels:
        # sensor serial number
        cHeader = dict(globalHeaders)
        cHeader["ats_data_file"] = ", ".join(dataFilenames)
        # relabel the selected channels as Ex/Ey; others keep their raw names
        if ex == chan:
            cHeader["channel_type"] = "Ex"
        elif ey == chan:
            cHeader["channel_type"] = "Ey"
        else:
            cHeader["channel_type"] = chan
        cHeader["scaling_applied"] = False
        cHeader["ts_lsb"] = 1
        cHeader["gain_stage1"] = 1
        cHeader["gain_stage2"] = 1
        cHeader["hchopper"] = 0
        cHeader["echopper"] = 0
        cHeader["pos_x1"] = 0
        cHeader["pos_x2"] = posX2[chan]
        cHeader["pos_y1"] = 0
        cHeader["pos_y2"] = posY2[chan]
        cHeader["pos_z1"] = 0
        cHeader["pos_z2"] = 1
        cHeader["sensor_sernum"] = sensors[chan]
        chanHeaders.append(cHeader)
    chanHeaders = writer.setChanHeadersFromKeywords(chanHeaders, {})
    writer.setOutPath(datapath)
    writer.writeHeaders(
        globalHeaders, channels, chanMap, chanHeaders, rename=False, ext="h423E"
    )
def readB423Params(
    dataFile: str, sampleFreq: float, dataByteOffset: int, recordByteSize: int
):
    """Get the parameters of the B423 data file.

    Parameters
    ----------
    dataFile : str
        The data file as a string
    sampleFreq : float
        The sampling frequency in Hz
    dataByteOffset : int
        The offset till the start of the data in bytes
    recordByteSize : int
        The size in bytes of a record

    Returns
    -------
    headers : dict
        The header values as a dictionary
    firstDatetime : datetime
        The time of the first sample in the data file
    lastDatetime : datetime
        The time of the last sample in the data file
    numSamples : int
        The number of samples in the data file given the sampling frequency
    """
    from resistics.utilities.utilsPrint import errorPrint

    filesize = os.path.getsize(dataFile)
    numSamples: float = (filesize - dataByteOffset) / recordByteSize
    if not numSamples.is_integer():
        errorPrint(
            "readB423Params",
            "Non-integer number of samples. Maybe the sampling frequency is incorrect",
            quitRun=True,
        )
    else:
        numSamples = int(numSamples)
    # with-statement guarantees the file handle is closed
    # (the original opened the file and never closed it)
    with open(dataFile, "rb") as f:
        hdrBytes = f.read(dataByteOffset)
        dataHeaders = readB423Header(hdrBytes)
        # each record begins with a 4-byte unix timestamp and a 2-byte sample
        # number within that second. Use standard sizes ("<L" = 4 bytes,
        # "<H" = 2 bytes): the original native-size "L" is 8 bytes on LP64
        # platforms (64-bit Linux/macOS) and fails to unpack a 4-byte buffer.
        # NOTE(review): little-endian assumed from the 4-byte field layout —
        # confirm against the B423 format specification.
        bts = f.read(6)
        firstTimestamp = struct.unpack("<L", bts[0:4])[0]
        firstSampleNum = struct.unpack("<H", bts[4:])[0]
        firstDatetime = datetime.utcfromtimestamp(
            firstTimestamp + (firstSampleNum / sampleFreq)
        )
        # seek to the start of the final record for the last sample time
        f.seek(filesize - recordByteSize)
        bts = f.read(6)
        lastTimestamp = struct.unpack("<L", bts[0:4])[0]
        lastSampleNum = struct.unpack("<H", bts[4:])[0]
        lastDatetime = datetime.utcfromtimestamp(
            lastTimestamp + (lastSampleNum / sampleFreq)
        )
    # cross-check: the sample count implied by the timestamps must match the
    # count implied by the file size, otherwise there is a gap in the file
    numSeconds = np.timedelta64(lastDatetime - firstDatetime) / np.timedelta64(1, "s")
    # +1 to numSamplesCalc because needs to be inclusive of the end
    numSamplesCalc = int(numSeconds * sampleFreq) + 1
    if numSamplesCalc != numSamples:
        errorPrint("readB423Params", "There is a gap in data file {}".format(dataFile))
        errorPrint("readB423Params", "No gaps allowed within data files")
        errorPrint(
            "readB423Params", "Please remove this file from the recording", quitRun=True
        )
    return dataHeaders, firstDatetime, lastDatetime, numSamples