Example #1
def txgrpshift(infile, outfile, subtri, shift, grpmap):
    '''
	Open the input file infile as a WaveformSet and apply the specified
	nonzero integer sample shift (positive is a time advance) to all
	transmissions from subtriangle subtri. The modified WaveformSet will be
	stored in outfile or, if outfile is None or an empty string, back to
	infile.
	
	If grpmap is not None, it specifies the location of a transmit-group
	file that is used to map subtriangle indices to transmission numbers.
	The grpmap must be provided if WaveformSet(infile).txgrps is not None
	and must be omitted if WaveformSet(infile).txgrps is None.
	'''
    # Validate numeric inputs
    try:
        subtri = subtridx(subtri)
    except (argparse.ArgumentTypeError, ValueError) as e:
        raise ValueError('Invalid subtriangle index: %s' % (e, ))

    try:
        shift = nonzero(shift)
    except (argparse.ArgumentTypeError, ValueError) as e:
        raise ValueError('Invalid shift value: %s' % (e, ))

    if not outfile:
        outfile = infile
        print('Will overwrite input file', infile)
    else:
        print('Will create new output file', outfile)

    wset = WaveformSet.load(infile)

    if wset.txgrps and not grpmap:
        raise ValueError(
            'Argument "grpmap" required when input uses transmit groups')

    # Attempt to assign a group map
    if grpmap: wset.groupmap = loadkeymat(grpmap)

    # Map transmit subtriangle indices to record rows
    txrows = [
        wset.tx2row(wset.element2tx(i))
        for i in range(64 * subtri, 64 * (subtri + 1))
    ]

    # Perform the shifts for each receive-channel record
    for hdr, data in wset.allrecords():
        # Copy the tail of the data
        tail = data[txrows, shift:].copy()
        # Now do the cyclic shift
        data[txrows, -shift:] = data[txrows, :shift]
        data[txrows, :-shift] = tail
        # Store the record (no need to make another copy)
        wset.setrecord(hdr, data, copy=False)

    # Store the output
    wset.store(outfile)
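
# A minimal sketch (NumPy only) checking that the tail-copy swap above is
# equivalent to a cyclic left shift by `shift` samples, i.e. np.roll with a
# negative shift along the sample axis. All names and values here are
# illustrative, not part of the module above.
import numpy as np

data = np.arange(24, dtype=float).reshape(3, 8)
txrows, shift = [0, 2], 3

expected = data.copy()
expected[txrows] = np.roll(expected[txrows], -shift, axis=-1)

tail = data[txrows, shift:].copy()
data[txrows, -shift:] = data[txrows, :shift]
data[txrows, :-shift] = tail

assert np.array_equal(data, expected)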
Example #2
def getatimes(atarg, freq=1, scalar=True, cols=None):
    '''
	Given a list of files or globs, try to open arrival-time maps matching
	the globs with habis.formats.loadkeymat() and pull the columns
	specified in the sequence cols. If cols is None, all columns will be
	picked. Keys of each map should be transmit-receive pairs (t, r).

	Files are loaded in lexical order. If the same key is present in
	multiple files, the value for that key will be a concatenation of the
	values from the individual files (each treated as a list) that
	preserves the lexical ordering.

	If the lengths of the value lists differ between keys in the composite
	arrival-time map, only those keys with maximum-length values will be
	retained.

	The times are scaled by the frequency freq to convert them to samples.

	If scalar is True, values in the returned map will be scalars if a
	single column is pulled. Otherwise, the returned values will always be
	arrays.
	'''
    # Try to load the files one-by-one
    atfiles = sorted(matchfiles(atarg, forcematch=True))

    # Concatenate values to accommodate repeat keys, track max column count
    ncols = 0
    atmap = defaultdict(list)
    for atfile in atfiles:
        for k, v in loadkeymat(atfile, scalar=False, nkeys=2).items():
            atmap[k].extend(v)
            ncols = max(ncols, len(atmap[k]))

    if cols is None:
        acols = list(range(ncols))
    else:
        acols = cols
        print(f'Using columns {acols} from arrival-time records')

    if scalar:
        if len(acols) != 1:
            raise ValueError(
                'Scalar arrival-time map requires a single column specification'
            )
        acols = acols[0]

    return {
        k: freq * np.array(v)[acols]
        for k, v in atmap.items() if len(v) == ncols
    }
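
# A self-contained sketch of the merge-and-filter behavior described in the
# docstring above, with in-memory dicts standing in for loadkeymat() results
# (illustrative data; the real maps come from files in lexical order).
from collections import defaultdict
import numpy as np

file_a = {(0, 1): [1.5], (0, 2): [2.5]}
file_b = {(0, 1): [1.7]}

ncols, atmap = 0, defaultdict(list)
for fmap in (file_a, file_b):
    for k, v in fmap.items():
        atmap[k].extend(v)
        ncols = max(ncols, len(atmap[k]))

# Only keys with maximum-length value lists survive; times scale by freq
freq = 20.0
out = {k: freq * np.array(v) for k, v in atmap.items() if len(v) == ncols}
assert set(out) == {(0, 1)} and np.allclose(out[0, 1], [30.0, 34.0])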
Example #3
	parser.add_argument('-b', '--backscatter', action='store_true',
			help='Extract backscatter waveforms, not arbitrary T-R pairs')

	parser.add_argument('-o', '--output', type=str, default=None,
			help='Output file (default: replace extension with extract.wmz)')

	parser.add_argument('inputs', type=str, nargs='+',
			help='Input WaveformMap files from which to extract')

	args = parser.parse_args(sys.argv[1:])

	# Try to read all input WaveformMap files
	infiles = matchfiles(args.inputs)

	# Read a defined receive-to-transmit-list map
	if args.trmap: args.trmap = loadkeymat(args.trmap, scalar=False)

	# At first, clobber the output
	append = False

	for infile in infiles:
		wmap = WaveformMap.load(infile)

		# Build the appropriate subset of the WaveformMap
		if args.backscatter: wvs = ((k, v) for k, v in wmap.items() if k[0] == k[1])
		else: wvs = trextract(wmap, args.trmap, args.random)
		omap = WaveformMap(wvs)

		if args.output:
			# Save to common output and switch to append mode
			omap.store(args.output, compression=args.compression, append=append)
			append = True
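
# A tiny sketch of the backscatter selection above: with WaveformMap keys as
# (transmit, receive) pairs, k[0] == k[1] keeps only same-element echoes.
# The dict below is an illustrative stand-in for a loaded WaveformMap.
wmap = {(1, 1): 'w11', (1, 2): 'w12', (2, 2): 'w22'}
wvs = {k: v for k, v in wmap.items() if k[0] == k[1]}
assert wvs == {(1, 1): 'w11', (2, 2): 'w22'}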
Example #4
def exdelayEngine(config):
    '''
	Use positions of elements and reflectors specified in the provided
	config, combined with a specified sound speed and reflector radius, to
	estimate the round-trip arrival times from every element to the
	reflector and back.
	'''
    msection = 'measurement'
    esection = 'exdelays'
    try:
        # Try to grab the input and output files
        eltfiles = config.getlist(esection, 'elements')
        rflfile = config.get(esection, 'reflectors')
        timefile = config.get(esection, 'timefile')
    except Exception as e:
        err = 'Configuration must specify elements, reflectors and timefile in [%s]' % esection
        raise HabisConfigError.fromException(err, e)

    # Grab the sound speed and reflector radius
    try:
        c = config.get(msection, 'c', mapper=float)
        r = config.get(msection, 'radius', mapper=float)
    except Exception as e:
        err = 'Configuration must specify c and radius in [%s]' % msection
        raise HabisConfigError.fromException(err, e)

    try:
        # Read an optional global time offset
        offset = config.get(esection, 'offset', mapper=float, default=0.)
    except Exception as e:
        err = 'Invalid optional offset in [%s]' % esection
        raise HabisConfigError.fromException(err, e)

    # Read the element and reflector positions
    eltspos = dict(kp for efile in eltfiles
                   for kp in loadkeymat(efile).items())
    reflpos = np.loadtxt(rflfile, ndmin=2)
    nrefl, nrdim = reflpos.shape

    times = {}
    for elt, epos in eltspos.items():
        nedim = len(epos)
        if not nedim <= nrdim <= nedim + 2:
            raise ValueError(
                'Incompatible reflector and element dimensionalities')
        # Determine one-way distances between element and reflector centers
        dx = norm(epos[np.newaxis, :] - reflpos[:, :nedim], axis=-1)
        # Use encoded wave speed if possible, otherwise use global speed
        try:
            lc = reflpos[:, nedim]
        except IndexError:
            lc = c
        # Use encoded radius if possible, otherwise use global radius
        try:
            lr = reflpos[:, nedim + 1]
        except IndexError:
            lr = r
        # Convert distances to round-trip arrival times
        times[elt, elt] = 2 * (dx - lr) / lc + offset

    # Save the estimated arrival times
    savez_keymat(timefile, times)
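
# A worked example of the round-trip conversion above: one 3-D element and
# one reflector row augmented with encoded speed and radius columns. Values
# are illustrative only.
import numpy as np
from numpy.linalg import norm

epos = np.array([0., 0., 0.])
# Reflector row: x, y, z, encoded sound speed, encoded radius
reflpos = np.array([[30., 40., 0., 1.5, 2.0]])
nedim = len(epos)

dx = norm(epos[np.newaxis, :] - reflpos[:, :nedim], axis=-1)  # 50.0
lc = reflpos[:, nedim]       # encoded speed, 1.5
lr = reflpos[:, nedim + 1]   # encoded radius, 2.0

# Twice the center-to-surface distance divided by the local speed
times = 2 * (dx - lr) / lc   # 2 * (50 - 2) / 1.5 = 64.0
assert np.allclose(times, 64.0)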
Example #5
        type=str,
        help='Integral output, keymap or binary matrix (with -c)')

    args = parser.parse_args(sys.argv[1:])

    # Load the tracer configuration
    try:
        config = HabisConfigParser(args.tracer)
    except Exception as e:
        err = f'Unable to load configuration {args.tracer}'
        raise HabisConfigError.fromException(err, e)

    tracer = PathTracer.fromconf(config)

    # Load the element coordinates and target list
    elements = loadkeymat(args.elements)

    if args.trlist:
        # Load the r-[t] keymap and flatten to a trlist
        targets = loadkeymat(args.targets)
        targets = [(t, r) for r, tl in targets.items() for t in tl]
    else:
        targets = mio.readbmat(args.targets)

    s = np.load(args.slowness)

    comm = MPI.COMM_WORLD
    rank, size = comm.rank, comm.size

    args.quiet = rank or args.quiet
    times = tracetimes(tracer, s, elements, targets[rank::size], args.trlist,
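
# A plain-Python sketch of the targets[rank::size] striping used above: each
# MPI rank takes every size-th target, partitioning the work with no
# communication. No MPI is needed to illustrate the slicing.
targets = list(range(10))
size = 3  # number of ranks

shares = [targets[rank::size] for rank in range(size)]
# rank 0 -> [0, 3, 6, 9], rank 1 -> [1, 4, 7], rank 2 -> [2, 5, 8]
assert sorted(t for share in shares for t in share) == targets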
Example #6
def atimesEngine(config):
    '''
	Use habis.trilateration.ArrivalTimeFinder to determine a set of
	round-trip arrival times from a set of one-to-many multipath arrival
	times. Multipath arrival times are computed as the maximum of
	cross-correlation with a reference pulse, plus some constant offset.
	'''
    asec = 'atimes'
    msec = 'measurement'
    ssec = 'sampling'

    kwargs = {}

    def _throw(msg, e, sec=None):
        if not sec: sec = asec
        raise HabisConfigError.fromException(f'{msg} in [{sec}]', e)

    try:
        # Read all target input lists
        targets = sorted(k for k in config.options(asec)
                         if k.startswith('target'))
        targetfiles = OrderedDict()
        for target in targets:
            targetfiles[target] = matchfiles(config.getlist(asec, target))
            if len(targetfiles[target]) < 1:
                raise HabisConfigError(f'Key {target} matches no inputs')
    except Exception as e:
        _throw('Configuration must specify at least one unique "target" key',
               e)

    try:
        efiles = config.getlist(asec, 'elements', default=None)
        if efiles:
            efiles = matchfiles(efiles)
            kwargs['elements'] = loadmatlist(efiles, nkeys=1)
    except Exception as e:
        _throw('Invalid optional elements', e)

    # Grab the reference file
    try:
        reffile = config.get(msec, 'reference', default=None)
    except Exception as e:
        _throw('Invalid optional reference', e, msec)

    # Grab the output file
    try:
        outfile = config.get(asec, 'outfile')
    except Exception as e:
        _throw('Configuration must specify outfile', e)

    try:
        # Grab the number of processes to use (optional)
        nproc = config.get('general',
                           'nproc',
                           mapper=int,
                           failfunc=process.preferred_process_count)
    except Exception as e:
        _throw('Invalid optional nproc', e, 'general')

    try:
        # Determine the sampling period and a global temporal offset
        dt = config.get(ssec, 'period', mapper=float)
        t0 = config.get(ssec, 'offset', mapper=float)
    except Exception as e:
        _throw('Configuration must specify period and offset', e, ssec)

    # Override the number of samples in WaveformMap
    try:
        kwargs['nsamp'] = config.get(ssec, 'nsamp', mapper=int)
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional nsamp', e, ssec)

    # Determine the oversampling rate to use when cross-correlating
    try:
        osamp = config.get(ssec, 'osamp', mapper=int, default=1)
    except Exception as e:
        _throw('Invalid optional osamp', e, ssec)

    try:
        neighbors = config.get(asec, 'neighbors', default=None)
        if neighbors:
            kwargs['neighbors'] = loadkeymat(neighbors, dtype=int)
    except Exception as e:
        _throw('Invalid optional neighbors', e)

    # Determine minimum SNR thresholds to enforce, if provided
    try:
        kwargs['minsnr'] = config.getlist(asec, 'minsnr', mapper=int)
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional minsnr', e)

    # Determine a temporal window to apply before finding delays
    try:
        kwargs['window'] = config.get(asec, 'window')
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional window', e)

    # Determine an energy leakage threshold
    try:
        kwargs['eleak'] = config.get(asec, 'eleak', mapper=float)
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional eleak', e)

    # Determine a bandpass filter to apply before finding delays
    try:
        kwargs['bandpass'] = config.get(asec, 'bandpass')
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional bandpass', e)

    # Determine peak-selection criteria
    try:
        kwargs['peaks'] = config.get(asec, 'peaks')
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional peaks', e)

    # Determine IMER criteria
    try:
        kwargs['imer'] = config.get(asec, 'imer')
    except HabisNoOptionError:
        pass
    except Exception as e:
        _throw('Invalid optional imer', e)

    maskoutliers = config.get(asec, 'maskoutliers', mapper=bool, default=False)
    optimize = config.get(asec, 'optimize', mapper=bool, default=False)
    kwargs['negcorr'] = config.get(asec, 'negcorr', mapper=bool, default=False)
    kwargs['signsquare'] = config.get(asec,
                                      'signsquare',
                                      mapper=bool,
                                      default=False)
    kwargs['flipref'] = config.get(asec, 'flipref', mapper=bool, default=False)

    # Check for delay cache specifications as boolean or file suffix
    cachedelay = config.get(asec, 'cachedelay', default=True)
    if isinstance(cachedelay, bool) and cachedelay: cachedelay = 'delays.npz'

    try:
        # Remove the nearmap file key
        guesses = shsplit(kwargs['peaks'].pop('nearmap'))
        guesses = loadmatlist(guesses, nkeys=2, scalar=False)
    except IOError as e:
        guesses = None
        print(f'WARNING - Ignoring nearmap: {e}', file=sys.stderr)
    except (KeyError, TypeError, AttributeError):
        guesses = None
    else:
        # Adjust delay time scales
        guesses = {k: (v - t0) / dt for k, v in guesses.items()}

    # Adjust the delay time scales for the neardefault, if provided
    try:
        v = kwargs['peaks']['neardefault']
    except KeyError:
        pass
    else:
        kwargs['peaks']['neardefault'] = (v - t0) / dt

    try:
        # Load the window map, if provided
        winmap = shsplit(kwargs['window'].pop('map'))
        winmap = loadmatlist(winmap, nkeys=2, scalar=False)
    except IOError as e:
        winmap = None
        print(f'WARNING - Ignoring window map: {e}', file=sys.stderr)
    except (KeyError, TypeError, AttributeError):
        winmap = None
    else:
        # Replace the map argument with the loaded array
        kwargs['window']['map'] = winmap

    times = OrderedDict()

    # Process each target in turn
    for i, (target, datafiles) in enumerate(targetfiles.items()):
        if guesses:
            # Pull the column of the nearmap for this target
            nearmap = {k: v[i] for k, v in guesses.items()}
            kwargs['peaks']['nearmap'] = nearmap

        if cachedelay:
            delayfiles = buildpaths(datafiles, extension=cachedelay)
        else:
            delayfiles = [None] * len(datafiles)

        times[target] = dict()

        dltype = 'IMER' if kwargs.get('imer', None) else 'cross-correlation'
        ftext = 'files' if len(datafiles) != 1 else 'file'
        print(
            f'Finding {dltype} delays for {target} ({len(datafiles)} {ftext})')

        for (dfile, dlayfile) in zip(datafiles, delayfiles):
            kwargs['cachefile'] = dlayfile

            delays = finddelays(nproc, dfile, reffile, osamp, **kwargs)

            # Note the receive channels in this data file
            lrx = set(k[1] for k in delays.keys())

            # Convert delays to arrival times
            delays = {k: v * dt + t0 for k, v in delays.items()}

            if any(dv < 0 for dv in delays.values()):
                raise ValueError('Non-physical, negative delays exist')

            if maskoutliers:
                # Remove outlying values from the delay dictionary
                delays = stats.mask_outliers(delays)

            if optimize:
                # Prepare the arrival-time finder
                atf = trilateration.ArrivalTimeFinder(delays)
                # Compute the optimized times for this data file
                optimes = {(k, k): v for k, v in atf.lsmr() if k in lrx}
            else:
                # Just pass through the desired times
                optimes = delays

            times[target].update(optimes)

    # Build the combined times list
    rxset = None
    for tmap in times.values():
        if rxset is None: rxset = set(tmap.keys())
        else: rxset.intersection_update(tmap.keys())

    if not rxset:
        raise ValueError(
            'Different targets have no common receive-channel indices')

    # Cast to Python float to avoid numpy dependencies in pickled output
    ctimes = {i: [float(t[i]) for t in times.values()] for i in sorted(rxset)}

    # Save the output as a pickled map
    savez_keymat(outfile, ctimes)
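
# A compact sketch of the final intersection-and-combine step above, using
# two toy per-target time maps keyed by (rx, rx) pairs (illustrative data).
times = {
    'target0': {(1, 1): 10.0, (2, 2): 12.0},
    'target1': {(1, 1): 11.0, (3, 3): 13.0},
}

rxset = None
for tmap in times.values():
    rxset = set(tmap) if rxset is None else rxset & set(tmap)

# Only keys common to every target survive; values combine in target order
ctimes = {k: [float(t[k]) for t in times.values()] for k in sorted(rxset)}
assert ctimes == {(1, 1): [10.0, 11.0]}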
Example #7
def finddelays(nproc=1, *args, **kwargs):
    '''
	Distribute, among nproc processes, delay analysis for waveforms using
	calcdelays(). All *args and **kwargs are passed to calcdelays on each
	participating process. This function explicitly sets the "queue",
	"rank", "grpsize", and "delaycache" arguments of calcdelays, so *args
	and **kwargs should not contain these values.

	The delaycache argument is built from an optional file specified in
	cachefile, which should be a map from transmit-receive pair (t, r) to a
	precomputed delay, loadable with habis.formats.loadkeymat.
	'''
    forbidden = {'queue', 'rank', 'grpsize', 'delaycache'}
    forbidden.intersection_update(kwargs)
    if forbidden:
        raise TypeError("Forbidden argument '{next(iter(forbidden))}'")

    cachefile = kwargs.pop('cachefile', None)

    # Try to read an existing delay map, if a cache file was specified
    if cachefile:
        try:
            kwargs['delaycache'] = loadkeymat(cachefile)
        except (KeyError, ValueError, IOError):
            pass

    # Create a result queue and a dictionary to accumulate results
    queue = multiprocessing.Queue(nproc)
    delays = {}

    # Extend the kwargs to include the result queue
    kwargs['queue'] = queue
    # Extend the kwargs to include the group size
    kwargs['grpsize'] = nproc

    # Keep track of waveform statistics
    stats = defaultdict(int)

    # Spawn the desired processes to perform the cross-correlation
    with process.ProcessPool() as pool:
        for i in range(nproc):
            # Pick a useful process name
            procname = process.procname(i)
            # Add the group rank to the kwargs
            kwargs['rank'] = i
            # Schedule the task (addtask copies kwargs, freezing this rank)
            pool.addtask(target=calcdelays,
                         name=procname,
                         args=args,
                         kwargs=kwargs)

        pool.start()

        # Wait for all processes to respond
        responses, deadpool = 0, False
        while responses < nproc:
            try:
                results = queue.get(timeout=0.1)
            except pyqueue.Empty:
                # Loosely join to watch for a dead pool
                pool.wait(timeout=0.1, limit=1)
                if not pool.unjoined:
                    # Note a dead pool, but give the read one more try
                    if deadpool: break
                    else: deadpool = True
            else:
                delays.update(results[0])
                for k, v in results[1].items():
                    if v: stats[k] += v
                responses += 1

        if responses != nproc:
            print(f'WARNING: Proceeding with {responses} of {nproc} '
                  'subprocess results. A subprocess may have died.')

        pool.wait()

    if stats:
        print(f'For file {os.path.basename(args[0])} '
              f'({len(delays)} identified times):')
        for k, v in sorted(stats.items()):
            if v:
                wfn = 'waveforms' if v > 1 else 'waveform'
                print(f'  {v} {k} {wfn}')

    if len(delays) and cachefile:
        # Save the computed delays, if desired
        try:
            savez_keymat(cachefile, delays)
        except (ValueError, IOError):
            pass

    return delays
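
# A stripped-down, runnable sketch of the queue-draining pattern above:
# nproc workers each push one (results, stats) pair and the parent merges
# them. The ProcessPool, timeouts and dead-pool handling are omitted; this
# uses only the standard library and illustrative data.
import multiprocessing

def _worker(rank, queue):
    # Each rank reports a disjoint slice of results plus its statistics
    queue.put(({(rank, rank): float(rank)}, {'processed': 1}))

if __name__ == '__main__':
    nproc = 4
    queue = multiprocessing.Queue(nproc)
    procs = [multiprocessing.Process(target=_worker, args=(i, queue))
             for i in range(nproc)]
    for p in procs: p.start()

    delays, stats = {}, {}
    for _ in range(nproc):
        results, pstats = queue.get()
        delays.update(results)
        for k, v in pstats.items():
            stats[k] = stats.get(k, 0) + v

    for p in procs: p.join()
    assert len(delays) == nproc and stats['processed'] == nproc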
Example #8
        tfiles = matchfiles(config.getlist(tsec, 'timefile'), forcematch=False)
        # Determine the local shares of every file
        tfiles = flocshare(tfiles, MPI.COMM_WORLD)
        # Pull out local share of locally available arrival times
        atimes = {}
        for tf, (st, ln) in tfiles.items():
            atimes.update(getatimes(tf, elements, 0, False,
                                    vclip, mask_outliers, st, ln))

    with watchConfigErrors('exclusions', tsec):
        # Try to load a list of arrival times to exclude
        efiles = matchfiles(config.getlist(tsec, 'exclusions', default=[]),
                            forcematch=False)
        exclusions = {(t, r) for f in efiles
                      for r, tl in loadkeymat(f).items() for t in tl}
        if exclusions:
            if not rank:
                print(
                    f'{len(exclusions)} measurement pairs marked for exclusion'
                )
            goodpairs = set(atimes).difference(exclusions)
            atimes = {k: atimes[k] for k in goodpairs}

    # Convert the scalar slowness or file name into a matrix
    try:
        s = float(s)
    except (ValueError, TypeError):
        s = np.load(s).astype(np.float64)
    else:
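
# The snippet above breaks off mid-block, but the scalar-or-file pattern it
# begins is simple to illustrate: accept either a numeric slowness or the
# name of a NumPy file. The helper below is a hypothetical sketch, not part
# of the original script.
import numpy as np

def load_slowness(s):
    # A scalar parses directly; anything else is treated as a .npy file
    try:
        return float(s)
    except (ValueError, TypeError):
        return np.load(s).astype(np.float64)

assert load_slowness('1.5') == 1.5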