Example #1
def context(fn):
    from pyrocko import datacube_ext

    dpath = os.path.dirname(os.path.abspath(fn))
    mtimes = [os.stat(dpath)[8]]  # stat field 8: st_mtime

    dentries = sorted([
        os.path.join(dpath, f) for f in os.listdir(dpath)
        if os.path.isfile(os.path.join(dpath, f))
    ])
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        mtimes.append(os.stat(fn2)[8])

    mtime = float(max(mtimes))

    if dpath in g_dir_contexts:
        dir_context = g_dir_contexts[dpath]
        if dir_context.mtime == mtime:
            return dir_context

        del g_dir_contexts[dpath]

    entries = []
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        if not os.path.isfile(fn2):
            continue

        with open(fn2, 'rb') as f:
            first512 = f.read(512)
            if not detect(first512):
                continue

        with open(fn2, 'rb') as f:
            try:
                header, data_arrays, gps_tags, nsamples, _ = \
                        datacube_ext.load(f.fileno(), 3, 0, -1, None)

            except datacube_ext.DataCubeError as e:
                e = DataCubeError(str(e))
                e.set_context('filename', fn)
                raise e

        header = dict(header)
        entries.append(
            DirContextEntry(
                path=os.path.abspath(fn2),
                tstart=util.str_to_time('20' + header['S_DATE'] + ' ' +
                                        header['S_TIME'],
                                        format='%Y/%m/%d %H:%M:%S'),
                ifile=int(header['DAT_NO'])))

    dir_context = DirContext(mtime=mtime, path=dpath, entries=entries)

    return dir_context
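
This excerpt relies on several module-level names from pyrocko's datacube reader (g_dir_contexts, DirContext, DirContextEntry, detect, DataCubeError). A minimal scaffolding sketch, assuming the record types are plain namedtuples; the magic-byte check in detect() is a placeholder, not the real file signature:

import os
from collections import namedtuple

g_dir_contexts = {}  # cache: directory path -> DirContext

DirContextEntry = namedtuple('DirContextEntry', 'path tstart ifile')
DirContext = namedtuple('DirContext', 'mtime path entries')


def detect(first512):
    # Placeholder: the real reader inspects the first 512 bytes for the
    # DataCube file magic.
    return first512.startswith(b'\x00\x00')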
Example #2
    def test_load_partial(self):
        fpath = common.test_data_file('test2.cube')
        f = open(fpath, 'rb')
        header, da1, gps_tags, nsamples, bookmarks = datacube_ext.load(
            f.fileno(), 2, 0, -1, None)

        for ioff in (0, 10, 1040000, 1048576, 2000000, 1000000):
            f.seek(0)
            header, da2, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), 2, ioff, 10, None)

            f.seek(0)
            header, da3, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), 2, ioff, 10, bookmarks)

            for a1, a2, a3 in zip(da1, da2, da3):
                assert num.all(a1[ioff:ioff+10] == a2) and num.all(a2 == a3)

        f.close()
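
Going by the assertions above, a partial read that reuses the bookmarks returned by an earlier full load must give the same samples as a plain partial read, just without rescanning the file. A small helper sketched on that assumption (load_window() is hypothetical, not part of pyrocko):

def load_window(path, offset, nwanted, bookmarks=None):
    # loadflag 2 requests decoded samples; offset and nwanted select the
    # window, mirroring the calls in the test above.
    with open(path, 'rb') as f:
        header, data_arrays, gps_tags, nsamples, bookmarks = \
            datacube_ext.load(f.fileno(), 2, offset, nwanted, bookmarks)

    return data_arrays, bookmarks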
Example #3
def context(fn):
    from pyrocko import datacube_ext

    dpath = os.path.dirname(os.path.abspath(fn))
    mtimes = [os.stat(dpath)[8]]  # stat field 8: st_mtime

    dentries = sorted([os.path.join(dpath, f) for f in os.listdir(dpath)
                       if os.path.isfile(os.path.join(dpath, f))])
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        mtimes.append(os.stat(fn2)[8])

    mtime = float(max(mtimes))

    if dpath in g_dir_contexts:
        dir_context = g_dir_contexts[dpath]
        if dir_context.mtime == mtime:
            return dir_context

        del g_dir_contexts[dpath]

    entries = []
    for dentry in dentries:
        fn2 = os.path.join(dpath, dentry)
        if not os.path.isfile(fn2):
            continue

        with open(fn2, 'rb') as f:
            first512 = f.read(512)
            if not detect(first512):
                continue

        with open(fn2, 'rb') as f:
            try:
                header, data_arrays, gps_tags, nsamples, _ = \
                        datacube_ext.load(f.fileno(), 3, 0, -1, None)

            except datacube_ext.DataCubeError as e:
                e = DataCubeError(str(e))
                e.set_context('filename', fn)
                raise e

        header = dict(header)
        entries.append(DirContextEntry(
            path=os.path.abspath(fn2),
            tstart=util.str_to_time(
                '20' + header['S_DATE'] + ' ' + header['S_TIME'],
                format='%Y/%m/%d %H:%M:%S'),
            ifile=int(header['DAT_NO'])))

    dir_context = DirContext(mtime=mtime, path=dpath, entries=entries)

    return dir_context
Example #4
def get_time_infos(fn):
    from pyrocko import datacube_ext

    with open(fn, 'rb') as f:
        try:
            header, _, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), 1, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    return dict(header), gps_tags, nsamples
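
Example #10 below unpacks gps_tags element-wise, which suggests it holds four parallel arrays: sample index, GPS time, fix flag and satellite count. A usage sketch under that assumption ('recording.cube' is a hypothetical file name):

header, gps_tags, nsamples = get_time_infos('recording.cube')
ipos, t, fix, nsvs = gps_tags  # sample index, GPS time, fix flag, n satellites
print('%i GPS tags over %i samples' % (len(ipos), nsamples))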
Example #5
    def benchmark_load(self):
        mode = {0: 'get time range', 1: 'get gps only', 2: 'get samples'}

        fpath = common.test_data_file('test2.cube')
        for irep in range(2):
            for loadflag in (0, 1, 2):
                f = open(fpath, 'rb')
                t0 = time.time()
                header, data_arrays, gps_tags, nsamples, bookmarks = \
                    datacube_ext.load(f.fileno(), loadflag, 0, -1, None)

                f.close()
                t1 = time.time()
                print('%s: %10.3f' % (mode[loadflag], t1 - t0))

            t0 = time.time()
            trs = io.load(fpath, format='datacube')
            t1 = time.time()
            print('with interpolation: %10.3f' % (t1 - t0))
            del trs
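
The mode dict above labels loadflag 0 as 'get time range', presumably the cheapest call. A sketch that uses it to peek at a header without decoding any samples; that nsamples is still populated in this mode is an assumption:

fpath = common.test_data_file('test2.cube')
with open(fpath, 'rb') as f:
    header, _, _, nsamples, _ = datacube_ext.load(f.fileno(), 0, 0, -1, None)

print('%i samples at %s Hz' % (nsamples, dict(header)['S_RATE']))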
Example #6
    def benchmark_load(self):
        mode = {
            0: 'get time range',
            1: 'get gps only',
            2: 'get samples'}

        fpath = common.test_data_file('test2.cube')
        for irep in range(2):
            for loadflag in (0, 1, 2):
                f = open(fpath, 'rb')
                t0 = time.time()
                header, data_arrays, gps_tags, nsamples, bookmarks = \
                    datacube_ext.load(f.fileno(), loadflag, 0, -1, None)

                f.close()
                t1 = time.time()
                print('%s: %10.3f' % (mode[loadflag], t1 - t0))

            t0 = time.time()
            trs = io.load(fpath, format='datacube')
            t1 = time.time()
            print('with interpolation: %10.3f' % (t1 - t0))
            del trs
Example #7
def iload(fn, load_data=True, interpolation='sinc'):
    from pyrocko import datacube_ext
    from pyrocko import signal_ext

    if interpolation not in ('sinc', 'off'):
        raise NotImplementedError(
            'no such interpolation method: %s' % interpolation)

    with open(fn, 'rb') as f:
        if load_data:
            loadflag = 2
        else:
            if interpolation == 'off':
                loadflag = 0
            else:
                # must get correct nsamples if interpolation is off
                loadflag = 1

        try:
            header, data_arrays, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), loadflag, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    header = dict(header)
    deltat = 1.0 / int(header['S_RATE'])
    nchannels = int(header['CH_NUM'])

    ipos, t, fix, nsvs, header_, offset_, nsamples_ = \
        get_extended_timing_context(fn)

    tmin, tmax, icontrol, tcontrol = analyse_gps_tags(
        header_, (ipos, t, fix, nsvs), offset_, nsamples_)

    tmin_ip = round(tmin / deltat) * deltat
    if interpolation != 'off':
        tmax_ip = round(tmax / deltat) * deltat
    else:
        tmax_ip = tmin_ip + (nsamples-1) * deltat

    nsamples_ip = int(round((tmax_ip - tmin_ip)/deltat)) + 1
    # to prevent problems with rounding errors:
    tmax_ip = tmin_ip + (nsamples_ip-1) * deltat

    leaps = num.array(
        [x[0] + util.gps_utc_offset(x[0]) for x in util.read_leap_seconds2()],
        dtype=float)

    for i in range(nchannels):
        if load_data:
            arr = data_arrays[i]
            assert arr.size == nsamples

            if interpolation == 'sinc' and icontrol is not None:
                ydata = num.empty(nsamples_ip, dtype=float)
                signal_ext.antidrift(
                    icontrol, tcontrol,
                    arr.astype(float), tmin_ip, deltat, ydata)

                ydata = num.round(ydata).astype(arr.dtype)
            else:
                ydata = arr

            tr_tmin = tmin_ip
            tr_tmax = None
        else:
            ydata = None
            tr_tmin = tmin_ip
            tr_tmax = tmax_ip

        tr = trace.Trace('', header['DEV_NO'], '', 'p%i' % i, deltat=deltat,
                         ydata=ydata, tmin=tr_tmin, tmax=tr_tmax, meta=header)

        bleaps = num.logical_and(tmin_ip <= leaps, leaps < tmax_ip)

        if num.any(bleaps):
            assert num.sum(bleaps) == 1
            tcut = leaps[bleaps][0]

            for tmin_cut, tmax_cut in [
                    (tr.tmin, tcut), (tcut, tr.tmax+tr.deltat)]:

                try:
                    tr_cut = tr.chop(tmin_cut, tmax_cut, inplace=False)
                    tr_cut.shift(
                        util.utc_gps_offset(0.5*(tr_cut.tmin+tr_cut.tmax)))
                    yield tr_cut

                except trace.NoData:
                    pass

        else:
            tr.shift(util.utc_gps_offset(0.5*(tr.tmin+tr.tmax)))
            yield tr
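
iload() is a generator, yielding one pyrocko.trace.Trace per channel and, as the leap-second branch above shows, possibly more than one piece per channel. A consumption sketch ('recording.cube' is again a hypothetical file name):

for tr in iload('recording.cube', load_data=True, interpolation='sinc'):
    print(tr.channel, util.time_to_str(tr.tmin))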
Example #8
def iload(fn, load_data=True, interpolation='sinc'):
    from pyrocko import datacube_ext
    from pyrocko import signal_ext

    if interpolation not in ('sinc', 'off'):
        raise NotImplementedError(
            'no such interpolation method: %s' % interpolation)

    with open(fn, 'rb') as f:
        if load_data:
            loadflag = 2
        else:
            loadflag = 1

        try:
            header, data_arrays, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), loadflag, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    header = dict(header)
    deltat = 1.0 / int(header['S_RATE'])
    nchannels = int(header['CH_NUM'])

    ipos, t, fix, nsvs, header_, offset_, nsamples_ = \
        get_extended_timing_context(fn)

    tmin, tmax, icontrol, tcontrol, _ = analyse_gps_tags(
        header_, (ipos, t, fix, nsvs), offset_, nsamples_)

    if icontrol is None:
        logger.warning(
            'No usable GPS timestamps found. Using datacube header '
            'information to guess time. (file: "%s")' % fn)

    tmin_ip = round(tmin / deltat) * deltat
    if interpolation != 'off':
        tmax_ip = round(tmax / deltat) * deltat
    else:
        tmax_ip = tmin_ip + (nsamples-1) * deltat

    nsamples_ip = int(round((tmax_ip - tmin_ip)/deltat)) + 1
    # to prevent problems with rounding errors:
    tmax_ip = tmin_ip + (nsamples_ip-1) * deltat

    leaps = num.array(
        [x[0] + util.gps_utc_offset(x[0]) for x in util.read_leap_seconds2()],
        dtype=float)

    if load_data and icontrol is not None:
        ncontrol_this = num.sum(
            num.logical_and(0 <= icontrol, icontrol < nsamples))

        if ncontrol_this <= 1:
            logger.warning(
                'Extrapolating GPS time information from directory context '
                '(insufficient number of GPS timestamps in file: "%s").' % fn)

    for i in range(nchannels):
        if load_data:
            arr = data_arrays[i]
            assert arr.size == nsamples

            if interpolation == 'sinc' and icontrol is not None:

                ydata = num.empty(nsamples_ip, dtype=float)
                try:
                    signal_ext.antidrift(
                        icontrol, tcontrol,
                        arr.astype(float), tmin_ip, deltat, ydata)

                except signal_ext.Error as e:
                    e = DataCubeError(str(e))
                    e.set_context('filename', fn)
                    e.set_context('n_control_points', icontrol.size)
                    e.set_context('n_samples_raw', arr.size)
                    e.set_context('n_samples_ip', ydata.size)
                    e.set_context('tmin_ip', util.time_to_str(tmin_ip))
                    raise e

                ydata = num.round(ydata).astype(arr.dtype)
            else:
                ydata = arr

            tr_tmin = tmin_ip
            tr_tmax = None
        else:
            ydata = None
            tr_tmin = tmin_ip
            tr_tmax = tmax_ip

        tr = trace.Trace('', header['DEV_NO'], '', 'p%i' % i, deltat=deltat,
                         ydata=ydata, tmin=tr_tmin, tmax=tr_tmax, meta=header)

        bleaps = num.logical_and(tmin_ip <= leaps, leaps < tmax_ip)

        if num.any(bleaps):
            assert num.sum(bleaps) == 1
            tcut = leaps[bleaps][0]

            for tmin_cut, tmax_cut in [
                    (tr.tmin, tcut), (tcut, tr.tmax+tr.deltat)]:

                try:
                    tr_cut = tr.chop(tmin_cut, tmax_cut, inplace=False)
                    tr_cut.shift(
                        util.utc_gps_offset(0.5*(tr_cut.tmin+tr_cut.tmax)))
                    yield tr_cut

                except trace.NoData:
                    pass

        else:
            tr.shift(util.utc_gps_offset(0.5*(tr.tmin+tr.tmax)))
            yield tr
Example #9
def iload(fn, load_data=True, interpolation='sinc'):
    from pyrocko import datacube_ext
    from pyrocko import signal_ext

    if interpolation not in ('sinc', 'off'):
        raise NotImplementedError('no such interpolation method: %s' %
                                  interpolation)

    with open(fn, 'rb') as f:
        if load_data:
            loadflag = 2
        else:
            if interpolation == 'off':
                loadflag = 0
            else:
                # must get correct nsamples if interpolation is off
                loadflag = 1

        try:
            header, data_arrays, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), loadflag, 0, -1, None)

        except datacube_ext.DataCubeError as e:
            e = DataCubeError(str(e))
            e.set_context('filename', fn)
            raise e

    header = dict(header)
    deltat = 1.0 / int(header['S_RATE'])
    nchannels = int(header['CH_NUM'])

    ipos, t, fix, nsvs, header_, offset_, nsamples_ = \
        get_extended_timing_context(fn)

    tmin, tmax, icontrol, tcontrol = analyse_gps_tags(header_,
                                                      (ipos, t, fix, nsvs),
                                                      offset_, nsamples_)

    tmin_ip = round(tmin / deltat) * deltat
    if interpolation != 'off':
        tmax_ip = round(tmax / deltat) * deltat
    else:
        tmax_ip = tmin_ip + (nsamples - 1) * deltat

    nsamples_ip = int(round((tmax_ip - tmin_ip) / deltat)) + 1
    # to prevent problems with rounding errors:
    tmax_ip = tmin_ip + (nsamples_ip - 1) * deltat

    leaps = num.array(
        [x[0] + util.gps_utc_offset(x[0]) for x in util.read_leap_seconds2()],
        dtype=float)

    for i in range(nchannels):
        if load_data:
            arr = data_arrays[i]
            assert arr.size == nsamples

            if interpolation == 'sinc' and icontrol is not None:
                ydata = num.empty(nsamples_ip, dtype=float)
                signal_ext.antidrift(icontrol, tcontrol, arr.astype(float),
                                     tmin_ip, deltat, ydata)

                ydata = num.round(ydata).astype(arr.dtype)
            else:
                ydata = arr

            tr_tmin = tmin_ip
            tr_tmax = None
        else:
            ydata = None
            tr_tmin = tmin_ip
            tr_tmax = tmax_ip

        tr = trace.Trace('',
                         header['DEV_NO'],
                         '',
                         'p%i' % i,
                         deltat=deltat,
                         ydata=ydata,
                         tmin=tr_tmin,
                         tmax=tr_tmax,
                         meta=header)

        bleaps = num.logical_and(tmin_ip <= leaps, leaps < tmax_ip)

        if num.any(bleaps):
            assert num.sum(bleaps) == 1
            tcut = leaps[bleaps][0]

            for tmin_cut, tmax_cut in [(tr.tmin, tcut),
                                       (tcut, tr.tmax + tr.deltat)]:

                try:
                    tr_cut = tr.chop(tmin_cut, tmax_cut, inplace=False)
                    tr_cut.shift(
                        util.utc_gps_offset(0.5 * (tr_cut.tmin + tr_cut.tmax)))
                    yield tr_cut

                except trace.NoData:
                    pass

        else:
            tr.shift(util.utc_gps_offset(0.5 * (tr.tmin + tr.tmax)))
            yield tr
Example #10
def plot_timeline(fns):
    from matplotlib import pyplot as plt
    from pyrocko import datacube_ext

    h = 3600.

    joined = [[], [], [], []]
    ioff = 0
    for fn in fns:
        with open(fn, 'rb') as f:
            header, _, gps_tags, nsamples, _ = datacube_ext.load(
                f.fileno(), 1, 0, -1, None)

        header = dict(header)
        deltat = 1.0 / int(header['S_RATE'])  # assumed identical across files

        ipos = gps_tags[0]
        ipos += ioff  # shift in place; the shifted array is appended below

        for i in range(4):
            joined[i].append(gps_tags[i])

        ioff += nsamples

    ipos, t, fix, nsvs = [num.concatenate(x) for x in joined]

    tref = num.median(t - ipos * deltat)
    tref = round(tref / deltat) * deltat

    x = ipos*deltat
    y = (t - tref) - ipos*deltat

    ifix = num.where(fix == 1)
    inofix = num.where(fix == 0)

    plt.plot(x[ifix]/h, y[ifix], '+', ms=5, color=color('chameleon3'))
    plt.plot(x[inofix]/h, y[inofix], 'x', ms=5, color=color('scarletred1'))

    tmin, tmax, icontrol, tcontrol = analyse_gps_tags(
        header, (ipos, t, fix, nsvs), ioff)

    tred = tcontrol - icontrol*deltat - tref
    plt.plot(icontrol*deltat/h, tred, color=color('aluminium6'))
    plt.plot(icontrol*deltat/h, tred, 'o', ms=5, color=color('aluminium6'))

    ymin = math.floor(tred.min() / deltat) * deltat - 0.1 * deltat
    ymax = math.ceil(tred.max() / deltat) * deltat + 0.1 * deltat

    plt.ylim(ymin, ymax)
    ygrid = math.floor(tred.min() / deltat) * deltat
    while ygrid < ymax:
        plt.axhline(ygrid, color=color('aluminium4'))
        ygrid += deltat

    xmin = icontrol[0]*deltat/h
    xmax = icontrol[-1]*deltat/h
    xsize = xmax - xmin
    xmin -= xsize * 0.1
    xmax += xsize * 0.1
    plt.xlim(xmin, xmax)

    plt.xlabel('Uncorrected (quartz) time [h]')
    plt.ylabel('Relative time correction [s]')

    plt.show()
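
Usage sketch: pass a list of DataCube files recorded back to back; their GPS tags are concatenated into a single stream before plotting ('a.cube' and 'b.cube' are hypothetical file names):

plot_timeline(['a.cube', 'b.cube'])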
Example #11
def iload(fn, load_data=True, interpolation='sinc'):
    from pyrocko import datacube_ext
    from pyrocko import signal_ext

    if interpolation not in ('sinc', 'off'):
        raise NotImplementedError(
            'no such interpolation method: %s' % interpolation)

    with open(fn, 'rb') as f:
        if load_data:
            loadflag = 2
        else:
            if interpolation == 'off':
                loadflag = 0
            else:
                # must get correct nsamples if interpolation is off
                loadflag = 1

        header, data_arrays, gps_tags, nsamples, _ = datacube_ext.load(
            f.fileno(), loadflag, 0, -1, None)

    header = dict(header)
    deltat = 1.0 / int(header['S_RATE'])
    nchannels = int(header['CH_NUM'])

    tmin, tmax, icontrol, tcontrol = analyse_gps_tags(
        header, gps_tags, nsamples)

    tmin_ip = round(tmin / deltat) * deltat
    if interpolation != 'off':
        tmax_ip = round(tmax / deltat) * deltat
    else:
        tmax_ip = tmin_ip + (nsamples-1) * deltat

    nsamples_ip = int(round((tmax_ip - tmin_ip)/deltat)) + 1
    # to prevent problems with rounding errors:
    tmax_ip = tmin_ip + (nsamples_ip-1) * deltat

    for i in range(nchannels):
        if load_data:
            arr = data_arrays[i]
            assert arr.size == nsamples

            if interpolation == 'sinc' and icontrol is not None:
                ydata = num.empty(nsamples_ip, dtype=float)
                signal_ext.antidrift(
                    icontrol, tcontrol,
                    arr.astype(float), tmin_ip, deltat, ydata)

                ydata = num.round(ydata).astype(arr.dtype)
            else:
                ydata = arr

            tr_tmin = tmin_ip
            tr_tmax = None
        else:
            ydata = None
            tr_tmin = tmin_ip
            tr_tmax = tmax_ip

        toff = util.gps_utc_offset(tmin_ip)
        tr_tmin -= toff
        if tr_tmax is not None:
            tr_tmax -= toff

        tr = trace.Trace('', header['DEV_NO'], '', 'p%i' % i, deltat=deltat,
                         ydata=ydata, tmin=tr_tmin, tmax=tr_tmax, meta=header)

        yield tr