Example #1
0
    def getChanChopper(self, chan) -> bool:
        """Return whether the chopper amplifier was on for a channel

        The chopper is an amplifier present in some instruments. There might be different calibration files for chopper on or off.

        Parameters
        ----------
        chan : str
            The channel to query

        Returns
        -------
        bool
            Flag designating whether chopper is on or off
        """

        electricChopper = self.getChanHeader(chan, "echopper")
        magneticChopper = self.getChanHeader(chan, "hchopper")
        # the chopper counts as on when the header flag matching the
        # channel's field type (electric or magnetic) is set
        return bool(
            (isElectric(chan) and electricChopper)
            or (isMagnetic(chan) and magneticChopper)
        )
Example #2
0
    def readHeaderXTR(self, headerFile: str) -> Tuple[Dict, List]:
        """Read an XTR header file

        The raw data for SPAM is in single precision Volts. However, if there are multiple data files for a single recording, each one may have a different gain. Therefore, a scaling has to be calculated for each data file and channel. This scaling will convert all channels to mV. 

        For the most part, this method only reads recording information. However, it does additionally calculate out the lsb scaling and store it in the ts_lsb channel header. More information is provided in the notes.

        Notes
        -----
        The raw data for SPAM is in single precision floats and record the raw Voltage measurements of the sensors. However, if there are multiple data files for a single continuous recording, each one may have a different gain. Therefore, a scaling has to be calculated for each data file. 

        For electric channels, the scaling begins with the scaling provided in the header file in the DATA section. This incorporates any gain occuring in the device. This scaling is further amended by a conversion to mV and polarity reversal,

        .. math::
        
            scaling = read scaling from DATA section of header file \\
            scaling = 1000 * scaling , \\
            scaling = -1000 * scaling , \\
            ts_lsb = scaling , 
        
        where the reason for the 1000 factor in line 2 is not clear, nor is the polarity reversal. However, this information was provided by people more familiar with the data format.
        
        For magnetic channels, the scaling in the header file DATA section is ignored. This is because it includes a static gain correction, which would be duplicated at the calibration stage. Therefore, this is not included at this point.

        .. math:: 
        
            scaling = -1000 , \\
            ts_lsb = scaling ,
        
        This scaling converts the magnetic data from V to mV.

        Parameters
        ----------
        headerFile : str
            The XTR header file to read in

        Returns
        -------
        headers : Dict
            Dictionary of general headers
        chanHeaders : List
            List of channel headers
        """

        with open(headerFile, "r") as f:
            lines = f.readlines()
        sectionLines = {}
        # let's get data
        # NOTE(review): assumes the first non-empty line is a [SECTION] line,
        # otherwise sec is referenced before assignment - confirm file format
        for line in lines:
            line = line.strip()
            line = line.replace("'", " ")
            # continue if line is empty
            if line == "":
                continue
            if "[" in line:
                sec = line[1:-1]
                sectionLines[sec] = []
            else:
                sectionLines[sec].append(line)
        # the base class is built around a set of headers based on ATS headers
        # though this is a bit more work here, it saves lots of code repetition
        headers = {}
        # recording information (start_time, start_date, stop_time, stop_date, ats_data_file)
        fileLine = sectionLines["FILE"][0]
        fileSplit = fileLine.split()
        headers["sample_freq"] = np.absolute(float(fileSplit[-1]))
        timeLine = sectionLines["FILE"][2]
        timeSplit = timeLine.split()
        # these are the unix time stamps
        startDate = float(timeSplit[1] + "." + timeSplit[2])
        datetimeStart = datetime.utcfromtimestamp(startDate)
        stopDate = float(timeSplit[3] + "." + timeSplit[4])
        datetimeStop = datetime.utcfromtimestamp(stopDate)
        headers["start_date"] = datetimeStart.strftime("%Y-%m-%d")
        headers["start_time"] = datetimeStart.strftime("%H:%M:%S.%f")
        headers["stop_date"] = datetimeStop.strftime("%Y-%m-%d")
        headers["stop_time"] = datetimeStop.strftime("%H:%M:%S.%f")
        # here calculate number of samples
        deltaSeconds = (datetimeStop - datetimeStart).total_seconds()
        # calculate number of samples - have to add one because the time given in SPAM recording is the actual time of the last sample
        numSamples = int(deltaSeconds * headers["sample_freq"]) + 1
        # put these in headers for ease of future calculations in merge headers
        headers["num_samples"] = numSamples
        # spam datasets only have the one data file for all channels
        headers["ats_data_file"] = fileSplit[1]
        # data information (meas_channels, sample_freq)
        chanLine = sectionLines["CHANNAME"][0]
        # this gets reformatted to an int later
        headers["meas_channels"] = chanLine.split()[1]
        numChansInt = int(headers["meas_channels"])
        # deal with the channel headers
        chanHeaders = []
        for iChan in range(0, numChansInt):
            chanH = self.chanDefaults()
            # set the sample frequency from the main headers
            chanH["sample_freq"] = headers["sample_freq"]
            # line data - read through the data in the correct channel order
            chanLine = sectionLines["CHANNAME"][iChan + 1]
            chanSplit = chanLine.split()
            dataLine = sectionLines["DATA"][iChan + 1]
            dataSplit = dataLine.split()
            # channel input information (gain_stage1, gain_stage2, hchopper, echopper)
            chanH["gain_stage1"] = 1
            chanH["gain_stage2"] = 1
            # channel output information (sensor_type, channel_type, ts_lsb, pos_x1, pos_x2, pos_y1, pos_y2, pos_z1, pos_z2, sensor_sernum)
            chanH["ats_data_file"] = fileSplit[1]
            chanH["num_samples"] = numSamples

            # channel information
            # spams often use Bx, By - use H within the software as a whole
            chanH["channel_type"] = consistentChans(chanSplit[2])
            # the sensor number is a bit of a hack - want MFSXXe or something - add MFS in front of the sensor number - this is liable to break
            # at the same time, set the chopper
            calLine = sectionLines["200{}003".format(iChan + 1)][0]
            calSplit = calLine.split()
            if isMagnetic(chanH["channel_type"]):
                chanH["sensor_sernum"] = calSplit[
                    2]  # the last three digits is the serial number
                sensorType = calSplit[1].split("_")[1][-2:]
                chanH["sensor_type"] = "MFS{:02d}".format(int(sensorType))
                if "LF" in calSplit[1]:
                    chanH["hchopper"] = 1
            else:
                chanH["sensor_type"] = "ELC00"
                if "LF" in calLine:
                    chanH["echopper"] = 1

            # data is raw voltage of sensors
            # both E and H fields need polarity reversal (from email with Reinhard)
            # get scaling from headers
            scaling = float(dataSplit[-2])
            if isElectric(chanH["channel_type"]):
                # the factor of 1000 is not entirely clear
                lsb = 1000.0 * scaling
                # volts to millivolts and a minus to switch polarity giving data in mV
                lsb = -1000.0 * lsb
            else:
                # volts to millivolts and a minus to switch polarity giving data in mV
                # scaling in header file is ignored because it duplicates static gain correction in calibration
                lsb = -1000.0
            chanH["ts_lsb"] = lsb

            # the distances
            if chanSplit[2] == "Ex":
                chanH["pos_x1"] = float(dataSplit[4]) / 2
                chanH["pos_x2"] = chanH["pos_x1"]
            if chanSplit[2] == "Ey":
                chanH["pos_y1"] = float(dataSplit[4]) / 2
                chanH["pos_y2"] = chanH["pos_y1"]
            if chanSplit[2] == "Ez":
                chanH["pos_z1"] = float(dataSplit[4]) / 2
                chanH["pos_z2"] = chanH["pos_z1"]

            # append chanHeaders to the list
            chanHeaders.append(chanH)

        # check information from raw file headers
        self.headersFromRawFile(headers["ats_data_file"], headers)
        # return the headers and chanHeaders from this file
        return headers, chanHeaders
Example #3
0
    def calibrate(
        self,
        timeData: TimeData,
        sensor: Dict[str, str],
        serial: Dict[str, int],
        chopper: Dict[str, bool],
    ) -> TimeData:
        """Calibrate time data

        Every channel in timeData is matched against the available calibration
        files using its sensor type, serial number and chopper setting. When a
        matching file is found, the channel is calibrated with the data in that
        file. When no file is found and useTheoretical is set, a theoretical
        response may be used for magnetic channels; otherwise the channel is
        left uncalibrated.

        Parameters
        ----------
        timeData : TimeData
            TimeData object to calibrate
        sensor : Dict
            Dictionary of sensor information with channels as the key and sensor type as the value (sensor is a string)
        serial :
            Dictionary of serial information with channels as the key and serial number as the value (serial is a number)
        chopper :
            Dictionary of chopper information with channels as the key and chopper flag as the value (chopper is a bool)

        Returns
        -------
        timeData : TimeData
            Calibrated TimeData object
        """

        calIO = CalibrationIO()
        # work through the channels one by one
        for chan in timeData.chans:
            self.printText("Calibrating channel {}".format(chan))
            # look for a calibration file matching this channel
            calFile, calFormat = self.getCalFile(
                sensor[chan], serial[chan], chopper[chan]
            )
            if calFile != "":
                # a file was found - calibrate with its data
                # static gain is already included in the calibration data, so
                # there is no need to apply it separately
                calIO.refresh(calFile, calFormat, chopper=chopper[chan], extend=self.extend)
                calData = calIO.read()
                self.printText(
                    "Calibration file found for sensor {}, serial number {}, chopper {}: {}".format(
                        sensor[chan], serial[chan], chopper[chan], calFile
                    )
                )
                self.printText("Format: {}".format(calFormat))
                self.printText(
                    "Static gain correction of {} applied to calibration data".format(
                        calData.staticGain
                    )
                )
                timeData.data[chan] = self.calibrateChan(
                    timeData.data[chan], timeData.sampleFreq, calData
                )
                timeData.addComment(
                    "Channel {} calibrated with calibration data from file {}".format(
                        chan, calFile
                    )
                )
            elif self.useTheoretical and isMagnetic(chan):
                # no file, but a theoretical response is allowed for magnetics
                calData = self.getTheoreticalCalData(sensor[chan])
                timeData.data[chan] = self.calibrateChan(
                    timeData.data[chan], timeData.sampleFreq, calData
                )
                timeData.addComment(
                    "Channel {} calibrated with theoretical calibration function".format(
                        chan
                    )
                )
            else:
                # nothing available - leave the channel as it is
                self.printText(
                    "No Calibration data found - channel will not be calibrated"
                )
                timeData.addComment("Channel {} not calibrated".format(chan))
        # return calibrated time data
        return timeData
Example #4
0
    def headersFromTable(self, tableData: Dict) -> Tuple[Dict, List]:
        """Populate the headers from the table values

        Parameters
        ----------
        tableData : Dict
            Ordered dictionary with the table data, keyed by table field name
            (e.g. "SRL{}", "CHEX", "EXLN", "HGN", "EGN")

        Returns
        -------
        headers : Dict
            Dictionary of general headers
        chanHeaders : List
            List of channel headers, ordered as the channels appear in the data
        """

        # initialise storage
        headers = {}
        chanHeaders = []
        # get the sample freqs for each ts file
        self.tsSampleFreqs = []
        for tsNum in self.tsNums:
            self.tsSampleFreqs.append(tableData["SRL{}".format(tsNum)])
        # for sample frequency, use the continuous channel
        headers["sample_freq"] = self.tsSampleFreqs[self.continuousI]
        # these are the unix time stamps
        firstDate, firstTime, lastDate, lastTime = self.getDates(tableData)
        # the start date is equal to the time of the first record
        headers["start_date"] = firstDate
        headers["start_time"] = firstTime
        datetimeStart = datetime.strptime("{} {}".format(firstDate, firstTime),
                                          "%Y-%m-%d %H:%M:%S.%f")
        # the stop date
        datetimeLast = datetime.strptime("{} {}".format(lastDate, lastTime),
                                         "%Y-%m-%d %H:%M:%S.%f")
        # records are usually equal to one second (beginning on 0 and ending on the last sample before the next 0)
        # so the recording stops one sample period before the next whole second
        datetimeStop = datetimeLast + timedelta(
            seconds=(1.0 - 1.0 / headers["sample_freq"]))
        # put the stop date and time in the headers
        headers["stop_date"] = datetimeStop.strftime("%Y-%m-%d")
        headers["stop_time"] = datetimeStop.strftime("%H:%M:%S.%f")
        # here calculate number of samples
        deltaSeconds = (datetimeStop - datetimeStart).total_seconds()
        # calculate number of samples - add one because the stop time is the time of the last sample
        numSamples = round(deltaSeconds * headers["sample_freq"]) + 1
        headers["num_samples"] = numSamples
        headers["ats_data_file"] = self.continuousF
        # deal with the channel headers
        # now want to do this in the correct order
        # chan headers should reflect the order in the data
        chans = ["Ex", "Ey", "Hx", "Hy", "Hz"]
        chanOrder = []
        for chan in chans:
            chanOrder.append(tableData["CH{}".format(chan.upper())])
        # sort the lists in the right order based on chanOrder
        chanOrder, chans = (list(x) for x in zip(
            *sorted(zip(chanOrder, chans), key=lambda pair: pair[0])))
        for chan in chans:
            chanH = self.chanDefaults()
            # set the sample frequency from the main headers
            chanH["sample_freq"] = headers["sample_freq"]
            # channel output information (sensor_type, channel_type, ts_lsb, pos_x1, pos_x2, pos_y1, pos_y2, pos_z1, pos_z2, sensor_sernum)
            chanH["ats_data_file"] = self.dataF[self.continuousI]
            chanH["num_samples"] = numSamples
            # channel information
            chanH["channel_type"] = consistentChans(
                chan)  # consistent chan naming
            # magnetic channels only
            if isMagnetic(chanH["channel_type"]):
                # last four characters of the table serial number entry
                chanH["sensor_sernum"] = tableData["{}SN".format(
                    chan.upper())][-4:]
                chanH["sensor_type"] = "Phoenix"
                # channel input information (gain_stage1, gain_stage2, hchopper, echopper)
                chanH["gain_stage1"] = tableData["HGN"]
                chanH["gain_stage2"] = 1

            # electric channels only
            if isElectric(chanH["channel_type"]):
                # the electrode positions: half the dipole length either side of centre
                if chan == "Ex":
                    chanH["pos_x1"] = float(tableData["EXLN"]) / 2.0
                    chanH["pos_x2"] = chanH["pos_x1"]
                if chan == "Ey":
                    chanH["pos_y1"] = float(tableData["EYLN"]) / 2.0
                    chanH["pos_y2"] = chanH["pos_y1"]
                # channel input information (gain_stage1, gain_stage2, hchopper, echopper)
                chanH["gain_stage1"] = tableData["EGN"]
                chanH["gain_stage2"] = 1

            # append chanHeaders to the list
            chanHeaders.append(chanH)

        # data information (meas_channels, sample_freq)
        headers["meas_channels"] = len(
            chans)  # this gets reformatted to an int later
        # return the headers and chanHeaders from this file
        return headers, chanHeaders
Example #5
0
def viewSpectraStack(
    projData: ProjectData, site: str, meas: str, **kwargs
) -> Union[plt.figure, None]:
    """View spectra stacks for a measurement

    Parameters
    ----------
    projData : ProjectData
        The project data
    site : str
        The site to view
    meas: str
        The measurement of the site to view
    chans : List[str], optional
        Channels to plot
    declevel : int, optional
        Decimation level to plot
    numstacks : int, optional
        The number of windows to stack
    coherences : List[List[str]], optional
        A list of coherences to add, specified as [["Ex", "Hy"], ["Ey", "Hx"]] 
    specdir : str, optional
        String that specifies spectra directory for the measurement
    show : bool, optional
        Show the spectra plot
    save : bool, optional
        Save the plot to the images directory
    plotoptions : Dict, optional
        Dictionary of plot options
    
    Returns
    -------
    matplotlib.pyplot.figure or None
        A matplotlib figure unless the plot is not shown and is saved, in which case None and the figure is closed.
    """

    options = {}
    options["chans"] = []
    options["declevel"] = 0
    options["numstacks"] = 10
    options["coherences"] = []
    options["specdir"] = projData.config.configParams["Spectra"]["specdir"]
    options["show"] = True
    options["save"] = False
    options["plotoptions"] = plotOptionsSpec()
    options = parseKeywords(options, kwargs)

    projectText(
        "Plotting spectra stack for measurement {} and site {}".format(meas, site)
    )
    specReader = getSpecReader(projData, site, meas, **options)

    # channels
    dataChans = specReader.getChannels()
    if len(options["chans"]) > 0:
        dataChans = options["chans"]
    numChans = len(dataChans)

    # get windows
    numWindows = specReader.getNumWindows()
    sampleFreqDec = specReader.getSampleFreq()
    f = specReader.getFrequencyArray()

    # calculate num of windows to stack in each set
    stackSize = int(np.floor(1.0 * numWindows / options["numstacks"]))

    # calculate number of rows - in case interested in coherences too
    # cast to int as np.ceil returns a float, which plt.subplot rejects
    nrows = (
        2
        if len(options["coherences"]) == 0
        else 2 + int(np.ceil(1.0 * len(options["coherences"]) / numChans))
    )

    # setup the figure
    plotfonts = options["plotoptions"]["plotfonts"]
    cmap = colorbarMultiline()
    fig = plt.figure(figsize=options["plotoptions"]["figsize"])
    st = fig.suptitle(
        "Spectra stack, fs = {:.6f} [Hz], decimation level = {:2d}, windows in each set = {:d}".format(
            sampleFreqDec, options["declevel"], stackSize
        ),
        fontsize=plotfonts["suptitle"],
    )
    st.set_y(0.98)

    # do the stacking
    for iP in range(0, options["numstacks"]):
        stackStart = iP * stackSize
        stackStop = min(stackStart + stackSize, numWindows)
        if stackStop <= stackStart:
            # no windows left for this stack set (e.g. numstacks > numWindows)
            continue
        color = cmap(iP / options["numstacks"])
        # dictionaries to hold data for this section
        stackedData = {}
        ampData = {}
        phaseData = {}
        powerData = {}

        # assign initial zeros
        for c in dataChans:
            stackedData[c] = np.zeros(shape=(specReader.getDataSize()), dtype="complex")
            ampData[c] = np.zeros(shape=(specReader.getDataSize()), dtype="complex")
            phaseData[c] = np.zeros(shape=(specReader.getDataSize()), dtype="complex")
            for c2 in dataChans:
                powerData[c + c2] = np.zeros(
                    shape=(specReader.getDataSize()), dtype="complex"
                )

        # now stack the data and create nice plots
        for iW in range(stackStart, stackStop):
            winData = specReader.readBinaryWindowLocal(iW)
            for c in dataChans:
                stackedData[c] += winData.data[c]
                ampData[c] += np.absolute(winData.data[c])
                phaseData[c] += np.angle(winData.data[c]) * (180.0 / np.pi)
                # get coherency data
                for c2 in dataChans:
                    powerData[c + c2] += winData.data[c] * np.conjugate(
                        winData.data[c2]
                    )
            if iW == stackStart:
                startTime = winData.startTime
            if iW == stackStop - 1:
                stopTime = winData.stopTime

        # scale powers and stacks
        ampLim = options["plotoptions"]["amplim"]
        for idx, c in enumerate(dataChans):
            stackedData[c] = stackedData[c] / (stackStop - stackStart)
            ampData[c] = ampData[c] / (stackStop - stackStart)
            phaseData[c] = phaseData[c] / (stackStop - stackStart)
            for c2 in dataChans:
                # normalisation
                powerData[c + c2] = 2 * powerData[c + c2] / (stackStop - stackStart)
                # the end points of the spectra are not doubled
                powerData[c + c2][[0, -1]] = powerData[c + c2][[0, -1]] / 2

            # plot amplitude
            ax1 = plt.subplot(nrows, numChans, idx + 1)
            plt.title("Amplitude {}".format(c), fontsize=plotfonts["title"])
            h = ax1.semilogy(
                f,
                ampData[c],
                color=color,
                label="{} to {}".format(
                    startTime.strftime("%m-%d %H:%M:%S"),
                    stopTime.strftime("%m-%d %H:%M:%S"),
                ),
            )
            if len(ampLim) > 2:
                ax1.set_ylim(ampLim)
            else:
                ax1.set_ylim(0.01, 1000)
            ax1.set_xlim(0, sampleFreqDec / 2.0)
            if isMagnetic(c):
                ax1.set_ylabel("Amplitude [nT]", fontsize=plotfonts["axisLabel"])
            else:
                ax1.set_ylabel("Amplitude [mV/km]", fontsize=plotfonts["axisLabel"])
            ax1.set_xlabel("Frequency [Hz]", fontsize=plotfonts["axisLabel"])
            plt.grid(True)

            # set tick sizes
            for label in ax1.get_xticklabels() + ax1.get_yticklabels():
                label.set_fontsize(plotfonts["axisTicks"])
            # plot phase
            ax2 = plt.subplot(nrows, numChans, numChans + idx + 1)
            plt.title("Phase {}".format(c), fontsize=plotfonts["title"])
            ax2.plot(
                f,
                phaseData[c],
                color=color,
                label="{} to {}".format(
                    startTime.strftime("%m-%d %H:%M:%S"),
                    stopTime.strftime("%m-%d %H:%M:%S"),
                ),
            )
            ax2.set_ylim(-180, 180)
            ax2.set_xlim(0, sampleFreqDec / 2.0)
            ax2.set_ylabel("Phase [degrees]", fontsize=plotfonts["axisLabel"])
            ax2.set_xlabel("Frequency [Hz]", fontsize=plotfonts["axisLabel"])
            plt.grid(True)
            # set tick sizes
            for label in ax2.get_xticklabels() + ax2.get_yticklabels():
                label.set_fontsize(plotfonts["axisTicks"])

        # plot coherences
        for idx, coh in enumerate(options["coherences"]):
            c = coh[0]
            c2 = coh[1]
            cohNom = np.power(np.absolute(powerData[c + c2]), 2)
            cohDenom = powerData[c + c] * powerData[c2 + c2]
            coherence = cohNom / cohDenom
            ax = plt.subplot(nrows, numChans, 2 * numChans + idx + 1)
            plt.title("Coherence {} - {}".format(c, c2), fontsize=plotfonts["title"])
            ax.plot(
                f,
                coherence,
                color=color,
                label="{} to {}".format(
                    startTime.strftime("%m-%d %H:%M:%S"),
                    stopTime.strftime("%m-%d %H:%M:%S"),
                ),
            )
            ax.set_ylim(0, 1.1)
            ax.set_xlim(0, sampleFreqDec / 2)
            ax.set_ylabel("Coherence", fontsize=plotfonts["axisLabel"])
            ax.set_xlabel("Frequency [Hz]", fontsize=plotfonts["axisLabel"])
            plt.grid(True)
            # set tick sizes
            for label in ax.get_xticklabels() + ax.get_yticklabels():
                label.set_fontsize(plotfonts["axisTicks"])

    # fig legend and layout
    ax = plt.gca()
    handles, labels = ax.get_legend_handles_labels()
    fig.tight_layout(rect=[0.01, 0.01, 0.98, 0.81])
    # legend drawn in its own axes above the plots
    legax = plt.axes(position=[0.01, 0.82, 0.98, 0.12], in_layout=False)
    # labelbottom must be a bool; the string "False" is truthy and would
    # leave the labels visible
    plt.tick_params(left=False, labelleft=False, bottom=False, labelbottom=False)
    plt.box(False)
    legax.legend(handles, labels, ncol=4, loc="upper center", fontsize=plotfonts["legend"])

    # plot show and save
    if options["save"]:
        impath = projData.imagePath
        filename = "spectraStack_{}_{}_dec{}_{}".format(
            site, meas, options["declevel"], options["specdir"]
        )
        savename = savePlot(impath, filename, fig)
        projectText("Image saved to file {}".format(savename))
    if options["show"]:
        plt.show(block=options["plotoptions"]["block"])
    if not options["show"] and options["save"]:
        plt.close(fig)
        return None
    return fig
Example #6
0
def viewSpectraSection(
    projData: ProjectData, site: str, meas: str, **kwargs
) -> Union[plt.figure, None]:
    """View spectra section for a measurement

    Parameters
    ----------
    projData : ProjectData
        The project data
    site : str
        The site to view
    meas: str
        The measurement of the site to view    
    chans : List[str], optional
        Channels to plot
    declevel : int, optional
        Decimation level to plot
    specdir : str, optional
        String that specifies spectra directory for the measurement
    show : bool, optional
        Show the spectra plot
    save : bool, optional
        Save the plot to the images directory
    plotoptions : Dict, optional
        Dictionary of plot options
    
    Returns
    -------
    matplotlib.pyplot.figure or None
        A matplotlib figure unless the plot is not shown and is saved, in which case None and the figure is closed.
    """

    options = {}
    options["chans"] = []
    options["declevel"] = 0
    options["specdir"] = projData.config.configParams["Spectra"]["specdir"]
    options["show"] = True
    options["save"] = False
    options["plotoptions"] = plotOptionsSpec()
    options = parseKeywords(options, kwargs)

    projectText(
        "Plotting spectra section for measurement {} and site {}".format(meas, site)
    )
    specReader = getSpecReader(projData, site, meas, **options)

    # channels
    dataChans = specReader.getChannels()
    if len(options["chans"]) > 0:
        dataChans = options["chans"]

    # get windows
    numWindows = specReader.getNumWindows()
    sampleFreqDec = specReader.getSampleFreq()

    # freq array
    f = specReader.getFrequencyArray()

    # limit to at most 250 windows, evenly spaced across the recording;
    # with fewer windows, use them all (previously this was arange(0, 250),
    # which indexed windows beyond the end of the data)
    if numWindows > 250:
        windows = list(np.linspace(0, numWindows, 250, endpoint=False, dtype=np.int32))
    else:
        windows = list(np.arange(0, numWindows))

    # create figure
    plotfonts = options["plotoptions"]["plotfonts"]
    fig = plt.figure(figsize=options["plotoptions"]["figsize"])
    st = fig.suptitle(
        "Spectra section, site = {}, meas = {}, fs = {:.2f} [Hz], decimation level = {:2d}, windows = {:d}, {} to {}".format(
            site,
            meas,
            sampleFreqDec,
            options["declevel"],
            len(windows),
            windows[0],
            windows[-1],
        ),
        fontsize=plotfonts["suptitle"],
    )
    st.set_y(0.98)

    # collect the data
    specData = np.empty(
        shape=(len(windows), len(dataChans), specReader.getDataSize()), dtype="complex"
    )
    dates = []
    for idx, iW in enumerate(windows):
        winData = specReader.readBinaryWindowLocal(iW)
        for cIdx, chan in enumerate(dataChans):
            specData[idx, cIdx, :] = winData.data[chan]
        dates.append(winData.startTime)

    ampLim = options["plotoptions"]["amplim"]
    for idx, chan in enumerate(dataChans):
        ax = plt.subplot(1, len(dataChans), idx + 1)
        plotData = np.transpose(np.absolute(np.squeeze(specData[:, idx, :])))
        if len(ampLim) == 2:
            # user-specified colour limits
            plt.pcolor(
                dates,
                f,
                plotData,
                norm=LogNorm(vmin=ampLim[0], vmax=ampLim[1]),
                cmap=colorbar2dSpectra(),
            )
        else:
            # colour limits from the data range
            plt.pcolor(
                dates,
                f,
                plotData,
                norm=LogNorm(vmin=plotData.min(), vmax=plotData.max()),
                cmap=colorbar2dSpectra(),
            )
        cb = plt.colorbar()
        cb.ax.tick_params(labelsize=plotfonts["axisTicks"])
        # set axis limits
        ax.set_ylim(0, specReader.getSampleFreq() / 2.0)
        ax.set_xlim([dates[0], dates[-1]])
        if isMagnetic(chan):
            plt.title("Amplitude {} [nT]".format(chan), fontsize=plotfonts["title"])
        else:
            plt.title("Amplitude {} [mV/km]".format(chan), fontsize=plotfonts["title"])
        ax.set_ylabel("Frequency [Hz]", fontsize=plotfonts["axisLabel"])
        ax.set_xlabel("Time", fontsize=plotfonts["axisLabel"])
        # set tick sizes
        for label in ax.get_xticklabels() + ax.get_yticklabels():
            label.set_fontsize(plotfonts["axisTicks"])
        plt.grid(True)

    # plot format
    fig.autofmt_xdate(rotation=90, ha="center")
    fig.tight_layout(rect=[0.02, 0.02, 0.96, 0.92])

    # plot show and save
    if options["save"]:
        impath = projData.imagePath
        filename = "spectraSection_{}_{}_dec{}_{}".format(
            site, meas, options["declevel"], options["specdir"]
        )
        savename = savePlot(impath, filename, fig)
        projectText("Image saved to file {}".format(savename))
    if options["show"]:
        plt.show(block=options["plotoptions"]["block"])
    if not options["show"] and options["save"]:
        plt.close(fig)
        return None
    return fig
Example #7
0
def measB423Headers(
    datapath: str,
    sampleFreq: float,
    hxSensor: int = 0,
    hySensor: int = 0,
    hzSensor: int = 0,
    hGain: int = 1,
    dx: float = 1,
    dy: float = 1,
) -> None:
    """Read a single B423 measurement directory and construct headers

    Scans the directory for .B423 data files, checks that together they form
    one continuous recording (no gaps between files) and writes out global
    and channel headers (extension .h423) to the same directory.

    Parameters
    ----------
    datapath : str
        The path to the measurement
    sampleFreq : float
        The sampling frequency of the data
    hxSensor : int, optional
        Serial number of the x direction magnetic sensor, used for calibration
    hySensor : int, optional
        Serial number of the y direction magnetic sensor, used for calibration
    hzSensor : int, optional
        Serial number of the z direction magnetic sensor, used for calibration
    hGain : int, optional
        Any gain on the magnetic channels which will need to be removed
    dx : float, optional
        Distance between x electrodes
    dy : float, optional
        Distance between y electrodes
    """

    from resistics.utilities.utilsPrint import generalPrint, warningPrint, errorPrint
    from resistics.ioHandlers.dataWriter import DataWriter

    dataFiles = glob.glob(os.path.join(datapath, "*.B423"))
    # guard against an empty directory, otherwise the start/stop lookups
    # below would raise an IndexError
    if not dataFiles:
        errorPrint(
            "constructB423Headers",
            "No .B423 data files found in {}".format(datapath),
            quitRun=True,
        )
    dataFilenames = [os.path.basename(dFile) for dFile in dataFiles]
    starts = []
    stops = []
    cumSamples = 0
    for dFile in dataFiles:
        generalPrint("constructB423Headers",
                     "Reading data file {}".format(dFile))
        # only the time range and sample count are needed here; the
        # per-file headers themselves are not used
        _, firstDatetime, lastDatetime, numSamples = readB423Params(
            dFile, sampleFreq, 1024, 30)
        generalPrint(
            "constructB423Headers",
            "start time = {}, end time = {}".format(firstDatetime,
                                                    lastDatetime),
        )
        generalPrint("constructB423Headers",
                     "number of samples = {}".format(numSamples))
        cumSamples += numSamples
        starts.append(firstDatetime)
        stops.append(lastDatetime)

    # search for gaps in the recording: order the files by start time and
    # check each file begins exactly one sample after the previous one ends
    sampleTime = timedelta(seconds=1.0 / sampleFreq)
    sortIndices = sorted(range(len(starts)), key=lambda k: starts[k])
    check = True
    for i in range(1, len(dataFiles)):
        # get the stop time of the previous dataset
        stopTimePrev = stops[sortIndices[i - 1]]
        startTimeNow = starts[sortIndices[i]]
        if startTimeNow != stopTimePrev + sampleTime:
            warningPrint("constructB423Headers",
                         "There is a gap between the datafiles")
            warningPrint(
                "constructB423Headers",
                "Please separate out datasets with gaps into separate folders",
            )
            warningPrint("constructB423Headers",
                         "Gap found between datafiles:")
            warningPrint("constructB423Headers",
                         "1. {}".format(dataFiles[sortIndices[i - 1]]))
            warningPrint("constructB423Headers",
                         "2. {}".format(dataFiles[sortIndices[i]]))
            check = False
    # if did not pass check, then exit
    if not check:
        errorPrint(
            "constructB423Headers",
            "All data for a single recording must be continuous.",
            quitRun=True,
        )

    # time of first and last sample over all data files
    datetimeStart = starts[sortIndices[0]]
    datetimeStop = stops[sortIndices[-1]]

    # global headers shared by all channels
    globalHeaders = {
        "sample_freq": sampleFreq,
        "num_samples": cumSamples,
        "start_time": datetimeStart.strftime("%H:%M:%S.%f"),
        "start_date": datetimeStart.strftime("%Y-%m-%d"),
        "stop_time": datetimeStop.strftime("%H:%M:%S.%f"),
        "stop_date": datetimeStop.strftime("%Y-%m-%d"),
        "meas_channels": 5,
    }
    writer = DataWriter()
    globalHeaders = writer.setGlobalHeadersFromKeywords({}, globalHeaders)

    # channel headers: three magnetic and two electric channels
    channels = ["Hx", "Hy", "Hz", "Ex", "Ey"]
    chanMap = {"Hx": 0, "Hy": 1, "Hz": 2, "Ex": 3, "Ey": 4}
    sensors = {
        "Hx": hxSensor,
        "Hy": hySensor,
        "Hz": hzSensor,
        "Ex": "0",
        "Ey": "0"
    }
    # electrode spacings enter through pos_x2/pos_y2; unity elsewhere
    posX2 = {"Hx": 1, "Hy": 1, "Hz": 1, "Ex": dx, "Ey": 1}
    posY2 = {"Hx": 1, "Hy": 1, "Hz": 1, "Ex": 1, "Ey": dy}

    chanHeaders = []
    for chan in channels:
        cHeader = dict(globalHeaders)
        cHeader["ats_data_file"] = ", ".join(dataFilenames)
        cHeader["channel_type"] = chan
        cHeader["scaling_applied"] = False
        cHeader["ts_lsb"] = 1
        # only magnetic channels carry the internal gain to be removed later
        cHeader["gain_stage1"] = hGain if isMagnetic(chan) else 1
        cHeader["gain_stage2"] = 1
        cHeader["hchopper"] = 0
        cHeader["echopper"] = 0
        cHeader["pos_x1"] = 0
        cHeader["pos_x2"] = posX2[chan]
        cHeader["pos_y1"] = 0
        cHeader["pos_y2"] = posY2[chan]
        cHeader["pos_z1"] = 0
        cHeader["pos_z2"] = 1
        cHeader["sensor_sernum"] = sensors[chan]
        chanHeaders.append(cHeader)
    chanHeaders = writer.setChanHeadersFromKeywords(chanHeaders, {})
    writer.setOutPath(datapath)
    writer.writeHeaders(globalHeaders,
                        channels,
                        chanMap,
                        chanHeaders,
                        rename=False,
                        ext="h423")
예제 #8
0
    def getPhysicalSamples(self, **kwargs):
        """Return time data scaled to physical (field) units

        Field units in resistics mean:

        - Electric channels in mV/km
        - Magnetic channels in mV
        - Calibration must be performed to obtain magnetic fields in nT

        Notes
        -----
        After the optional scaling in getUnscaledSamples, Lemi B423 magnetic
        channels are in mV with the internal gain still applied and electric
        channels are in microvolts. Hence:

        - Electric channels are divided by 1000 (microvolt to millivolt) and
          then by the dipole length in km (east-west and north-south electrode
          spacings) to give mV/km.
        - Magnetic channels are divided by the internal gain value, which
          should be set in the headers.

        Calibration is still required to express magnetic fields in nT.

        Parameters
        ----------
        chans : List[str]
            List of channels to return if not all are required
        startSample : int
            First sample to return
        endSample : int
            Last sample to return
        remaverage : bool
            Remove average from the data
        remzeros : bool
            Remove zeroes from the data
        remnans: bool
            Remove NaNs from the data

        Returns
        -------
        TimeData
            Time data object
        """

        # defaults cover all channels over the full sample range
        opts = self.parseGetDataKeywords(kwargs)
        # unscaled data, but with the per-file gain scalings applied
        tData = self.getUnscaledSamples(
            chans=opts["chans"],
            startSample=opts["startSample"],
            endSample=opts["endSample"],
            scale=True,
        )

        # convert each channel to field units, dividing by dipole lengths
        for ch in opts["chans"]:
            if isElectric(ch):
                # microvolts -> millivolts
                tData.data[ch] = tData.data[ch] / 1000.0
                tData.addComment(
                    "Dividing channel {} by 1000 to convert microvolt to millivolt"
                    .format(ch))
            if isMagnetic(ch):
                # strip the internal amplifier gain recorded in the headers
                gain = self.getChanGain1(ch)
                tData.data[ch] = tData.data[ch] / gain
                tData.addComment(
                    "Removing gain of {} from channel {}".format(gain, ch))
            if ch == "Ex":
                # multiplying by 1000/dx equals dividing by the distance in km
                tData.data[ch] = 1000.0 * tData.data[ch] / self.getChanDx(ch)
                tData.addComment(
                    "Dividing channel {} by electrode distance {} km to give mV/km"
                    .format(ch,
                            self.getChanDx(ch) / 1000.0))
            if ch == "Ey":
                # multiplying by 1000/dy equals dividing by the distance in km
                tData.data[ch] = 1000 * tData.data[ch] / self.getChanDy(ch)
                tData.addComment(
                    "Dividing channel {} by electrode distance {:.6f} km to give mV/km"
                    .format(ch,
                            self.getChanDy(ch) / 1000.0))

            # optional clean-up: zeros (off by default)
            if opts["remzeros"]:
                tData.data[ch] = removeZerosSingle(tData.data[ch])
            # optional clean-up: NaNs (off by default)
            if opts["remnans"]:
                tData.data[ch] = removeNansSingle(tData.data[ch])
            # demean the channel (on by default)
            if opts["remaverage"]:
                tData.data[ch] = tData.data[ch] - np.average(tData.data[ch])

        # record which clean-up options were applied
        tData.addComment(
            "Remove zeros: {}, remove nans: {}, remove average: {}".format(
                opts["remzeros"], opts["remnans"], opts["remaverage"]))
        return tData