Example No. 1
def parseKeywords(default: Dict[str, Any],
                  keywords: Dict[str, Any],
                  printkw: bool = True):
    """General print to terminal

    Parameters
    ----------
    default : Dict[str, Any]
        Dictionary of default parameters
    keywords : Dict[str, Any]
        Dictionary of optional keywords
    printkw : bool
        Print out the keywords

    Returns
    -------
    Dict[str, Any]
        The dictionary with the appropriate defaults overwritten by keyword arguments
    """
    for w in default:
        if w in keywords:
            default[w] = keywords[w]
    if printkw:
        generalPrint(
            "{}::utilsCheck::parseKeywords".format(inspect.stack()[1][3]),
            str(default))
    return default
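
As a quick illustration of the merging behaviour, a minimal sketch (the parameter names below are made up; printkw is disabled so nothing is printed):

defaults = {"window": 256, "overlap": 64, "detrend": True}
keywords = {"overlap": 128, "notAnOption": 0}
merged = parseKeywords(defaults, keywords, printkw=False)
# merged == {"window": 256, "overlap": 128, "detrend": True}
# keys not present in defaults ("notAnOption") are silently ignored,
# and the defaults dictionary is modified in place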
Example No. 2
def test_generalPrint(capfd) -> None:
    from resistics.common.print import generalPrint

    generalPrint("test", "test")
    captured = capfd.readouterr()
    # skip the first nine characters, which hold the time prefix that generalPrint prepends
    out = captured.out[9:]
    assert out == "test: test\n"
Example No. 3
    def printText(self, infoStr: str) -> None:
        """General print to terminal

        Parameters
        ----------
        infoStr : str
            The string to print to the console
        """
        generalPrint("{} Info".format(self.__class__.__name__), infoStr)
Example No. 4
def projectText(infoStr: str) -> None:
    """General print to terminal

    Parameters
    ----------
    infoStr : str
        The string to print to the console
    """
    generalPrint("{} Info".format(inspect.stack()[1][3]), infoStr)
Example No. 5
def checkFilepath(path: str) -> bool:
    """Check if file exists
    
    TODO: Should check that it is actually a file

    Parameters
    ----------
    path : str
        Filepath to check

    Returns
    -------
    out : bool
        True if file exists
    """
    if not os.path.exists(path):
        generalPrint("utilsio::checkFilepath",
                     "File path {} could not be found.".format(path))
        return False
    return True
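
One possible way to address the TODO is to use os.path.isfile rather than os.path.exists, so that directories are rejected as well. A sketch, not the library's implementation; checkFilepathStrict is a hypothetical name:

import os

def checkFilepathStrict(path: str) -> bool:
    """Sketch of a stricter check: the path must exist and be a regular file"""
    # os.path.isfile returns False for directories and for missing paths
    if not os.path.isfile(path):
        generalPrint("utilsio::checkFilepathStrict",
                     "File path {} could not be found or is not a file.".format(path))
        return False
    return True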
Example No. 6
def downsampleData(
    data: Dict[str, np.ndarray], downsampleFactor: int
) -> Dict[str, np.ndarray]:
    """Decimate array data
    
    Parameters
    ----------
    data : Dict
        Dictionary with channel as keys and data as values
    downsampleFactor : int
        The factor to downsample the data by

    Returns
    -------
    data : Dict
        Dictionary with channel as keys and data as values
    """
    # if downsampleFactor is 1, nothing to do
    if downsampleFactor == 1:
        return data

    # scipy recommends decimating by factors no greater than 13 in a single call
    # hence factorise larger downsampleFactors into multiple smaller steps
    if downsampleFactor > 13:
        downsamples = factorise(downsampleFactor)
        generalPrint(
            "Decimation",
            "Downsample factor {} greater than 13. Downsampling will be performed in multiple steps of {}".format(
                downsampleFactor, arrayToStringInt(downsamples)
            ),
        )
    else:
        downsamples = [downsampleFactor]

    # downsample for each factor in downsamples
    for factor in downsamples:
        for c in data:
            data[c] = signal.decimate(data[c], factor, zero_phase=True)
    return data
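
A short usage sketch with synthetic data; the channel names and sampling frequency are placeholders, and a factor of 32 is chosen deliberately to trigger the multi-step path:

import numpy as np

fs = 512
t = np.arange(0, 10, 1.0 / fs)
data = {"Ex": np.sin(2 * np.pi * 5.0 * t), "Ey": np.cos(2 * np.pi * 5.0 * t)}
decimated = downsampleData(data, 32)
# 32 > 13, so the decimation is performed in several smaller steps;
# each channel shrinks from 5120 samples to roughly 5120 / 32 = 160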
Example No. 7
def loadConfig(filepath: str = "") -> ConfigObj:
    """Get configuration information

    Parameters
    ----------
    filepath : str, optional
        The path to the configuration file

    Returns
    -------
    config : ConfigObj
        ConfigObj with global configuration parameters
    """
    configFile = getDefaultConfigFilepath()
    if filepath == "" or not checkFilepath(filepath):
        config = ConfigObj(configspec=configFile)
    else:
        config = ConfigObj(filepath, configspec=configFile)
        generalPrint("loadConfig", "Loading configuration file {:s}".format(filepath))
    validator = Validator()
    result = config.validate(validator)
    if not result:
        errorPrint("loadConfigFile", "Config file validation failed", quitrun=True)
    return config
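
A short usage sketch; the custom configuration path below is hypothetical, and the no-argument call falls back to the packaged defaults:

# with no argument (or a missing file) the packaged default configuration is used
config = loadConfig()
# with a valid path, the file is read and validated against the default configspec
userConfig = loadConfig("myproject/customconfig.ini")  # hypothetical path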
Example No. 8
def measB423Headers(
    datapath: str,
    sampleFreq: float,
    hxSensor: int = 0,
    hySensor: int = 0,
    hzSensor: int = 0,
    hGain: int = 1,
    dx: float = 1,
    dy: float = 1,
) -> None:
    """Read a single B423 measurement directory and construct headers
    
    Parameters
    ----------
    datapath : str
        The path to the measurement
    sampleFreq : float
        The sampling frequency of the data
    hxSensor : int, optional
        The x direction magnetic sensor serial number, used for calibration
    hySensor : int, optional
        The y direction magnetic sensor serial number, used for calibration
    hzSensor : int, optional
        The z direction magnetic sensor serial number, used for calibration
    hGain : int, optional
        Any gain on the magnetic channels which will need to be removed
    dx : float, optional
        Distance between x electrodes
    dy : float, optional
        Distance between y electrodes
    """
    from resistics.common.print import generalPrint, warningPrint, errorPrint
    from resistics.time.writer import TimeWriter

    dataFiles = glob.glob(os.path.join(datapath, "*.B423"))
    dataFilenames = [os.path.basename(dFile) for dFile in dataFiles]
    starts = []
    stops = []
    cumSamples = 0
    for idx, dFile in enumerate(dataFiles):
        generalPrint("constructB423Headers",
                     "Reading data file {}".format(dFile))
        dataHeaders, firstDatetime, lastDatetime, numSamples = readB423Params(
            dFile, sampleFreq, 1024, 30)
        generalPrint("constructB423Headers",
                     "headers = {}".format(dataHeaders))
        generalPrint(
            "constructB423Headers",
            "start time = {}, end time = {}".format(firstDatetime,
                                                    lastDatetime),
        )
        generalPrint("constructB423Headers",
                     "number of samples = {}".format(numSamples))
        cumSamples += numSamples
        starts.append(firstDatetime)
        stops.append(lastDatetime)
    # now need to search for any missing data
    sampleTime = timedelta(seconds=1.0 / sampleFreq)
    # sort by start times
    sortIndices = sorted(list(range(len(starts))), key=lambda k: starts[k])
    check = True
    for i in range(1, len(dataFiles)):
        # get the stop time of the previous dataset
        stopTimePrev = stops[sortIndices[i - 1]]
        startTimeNow = starts[sortIndices[i]]
        if startTimeNow != stopTimePrev + sampleTime:
            warningPrint("constructB423Headers",
                         "There is a gap between the datafiles")
            warningPrint(
                "constructB423Headers",
                "Please separate out datasets with gaps into separate folders",
            )
            warningPrint("constructB423Headers",
                         "Gap found between datafiles:")
            warningPrint("constructB423Headers",
                         "1. {}".format(dataFiles[sortIndices[i - 1]]))
            warningPrint("constructB423Headers",
                         "2. {}".format(dataFiles[sortIndices[i]]))
            check = False
    # if did not pass check, then exit
    if not check:
        errorPrint(
            "constructB423Headers",
            "All data for a single recording must be continuous.",
            quitrun=True,
        )

    # time of first and last sample
    datetimeStart = starts[sortIndices[0]]
    datetimeStop = stops[sortIndices[-1]]

    # global headers
    globalHeaders = {
        "sample_freq": sampleFreq,
        "num_samples": cumSamples,
        "start_time": datetimeStart.strftime("%H:%M:%S.%f"),
        "start_date": datetimeStart.strftime("%Y-%m-%d"),
        "stop_time": datetimeStop.strftime("%H:%M:%S.%f"),
        "stop_date": datetimeStop.strftime("%Y-%m-%d"),
        "meas_channels": 5,
    }
    writer = TimeWriter()
    globalHeaders = writer.setGlobalHeadersFromKeywords({}, globalHeaders)

    # channel headers
    channels = ["Hx", "Hy", "Hz", "Ex", "Ey"]
    chanMap = {"Hx": 0, "Hy": 1, "Hz": 2, "Ex": 3, "Ey": 4}
    sensors = {
        "Hx": hxSensor,
        "Hy": hySensor,
        "Hz": hzSensor,
        "Ex": "0",
        "Ey": "0"
    }
    posX2 = {"Hx": 1, "Hy": 1, "Hz": 1, "Ex": dx, "Ey": 1}
    posY2 = {"Hx": 1, "Hy": 1, "Hz": 1, "Ex": 1, "Ey": dy}

    chanHeaders = []
    for chan in channels:
        # start from a copy of the global headers and add channel specific values
        cHeader = dict(globalHeaders)
        cHeader["ats_data_file"] = ", ".join(dataFilenames)
        cHeader["channel_type"] = chan
        cHeader["scaling_applied"] = False
        cHeader["ts_lsb"] = 1
        cHeader["gain_stage1"] = hGain if isMagnetic(chan) else 1
        cHeader["gain_stage2"] = 1
        cHeader["hchopper"] = 0
        cHeader["echopper"] = 0
        cHeader["pos_x1"] = 0
        cHeader["pos_x2"] = posX2[chan]
        cHeader["pos_y1"] = 0
        cHeader["pos_y2"] = posY2[chan]
        cHeader["pos_z1"] = 0
        cHeader["pos_z2"] = 1
        cHeader["sensor_sernum"] = sensors[chan]
        chanHeaders.append(cHeader)
    chanHeaders = writer.setChanHeadersFromKeywords(chanHeaders, {})
    writer.setOutPath(datapath)
    writer.writeHeaders(globalHeaders,
                        channels,
                        chanMap,
                        chanHeaders,
                        rename=False,
                        ext="h423")
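
A usage sketch for writing headers for a single B423 measurement directory; the path, sampling frequency, sensor serial numbers and electrode spacings below are placeholders:

measB423Headers(
    "timeData/meas_2019-01-01_10-00-00",  # hypothetical measurement directory
    sampleFreq=500,
    hxSensor=710,
    hySensor=712,
    hzSensor=714,
    hGain=11,
    dx=60.0,
    dy=60.7,
)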