Example no. 1
def get_array2d(start,end,axis='X',prefix='./data',**kwargs):
    '''Return a seismometer spectrogram (or band-limited timeseries) for a GPS segment.

    Read the cached spectrogram from HDF5 if it exists; otherwise compute it
    from the timeseries data (resampled to 32 Hz) and cache it. If ``blrms``
    is given and the cache exists, crop the spectrogram to that frequency band
    and return its sum over frequency as a timeseries.
    '''
    nproc = kwargs.pop('nproc',4)
    bandpass = kwargs.pop('bandpass',None)
    blrms = kwargs.pop('blrms',None)
    fftlen = kwargs.pop('fftlen',2**8)
    overlap = fftlen/2

    # Check existence of the spectrogram data
    fname_hdf5 = fname_hdf5_asd(start,end,prefix,axis)
    if os.path.exists(fname_hdf5):
        specgram = Spectrogram.read(fname_hdf5)
        if blrms:
            timeseries = specgram.crop_frequencies(blrms[0],blrms[1]).sum(axis=1)
            return timeseries
        return specgram
    
    # If the spectrogram does not exist, calculate it from the timeseries data.
    try:
        fname = fname_gwf(start,end,prefix='./data')
        chname = get_seis_chname(start,end,axis=axis)
        # check existence of the timeseries data
        if os.path.exists(fname):
            data = TimeSeries.read(fname,chname,nproc=nproc)
        else:
            # when the timeseries data does not exist
            fnamelist = existedfilelist(start,end)
            chname = get_seis_chname(start,end)
            datadict = TimeSeriesDict.read(fnamelist,chname,nproc=nproc)
            datadict = datadict.resample(32)
            datadict = datadict.crop(start,end)
            chname = get_seis_chname(start,end,axis=axis)
            datadict.write(fname,format='gwf.lalframe')
            data = TimeSeries.read(fname,chname,nproc=nproc)
            # If the data are broken, raise an error.
            if data.value.shape[0] != 131072:
                log.debug(data.value.shape)
                log.debug('####### {0} {1}'.format(start,end))
                raise ValueError('data broken')
    except:
        log.debug(traceback.format_exc())
        raise ValueError('!!!')

    # If the data are broken, raise an error.
    if data.value.shape[0] != 131072: # (131072 = 2**17 = 2**12[sec] * 2**5[Hz] )
        log.debug(data.value.shape)
        log.debug('!!!!!!!! {0} {1}'.format(start,end))
        raise ValueError('data broken')

    # calculate from timeseries data
    specgram = data.spectrogram2(fftlength=fftlen,overlap=overlap,nproc=nproc)
    specgram.write(fname_hdf5,format='hdf5',overwrite=True)
    return specgram
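A minimal usage sketch, assuming the helper functions (fname_hdf5_asd, get_seis_chname, existedfilelist) are importable from the same module; the GPS times below are hypothetical.

# Hypothetical 2**12-second segment (the length the size check above expects).
start = 1211817600
end = start + 2**12
# Full spectrogram of the X-axis seismometer channel.
specgram = get_array2d(start, end, axis='X', fftlen=2**8, nproc=4)
# Band-limited timeseries between 0.1 Hz and 0.3 Hz, read from the cached spectrogram.
blrms_ts = get_array2d(start, end, axis='X', blrms=(0.1, 0.3))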
Example no. 2
def get_spectrogram(start, end, axis='X', seis='EXV', **kwargs):
    ''' Get Spectrogram

    Parameters
    ----------
    start : `int`
        start GPS time.
    end : `int`
        end GPS time.
    axis : `str`, optional
        axis of the seismometer. default is 'X'.
    seis : `str`, optional
        name of the seismometer. default is 'EXV'.

    Returns
    -------
    specgram : `gwpy.spectrogram.Spectrogram`
        spectrogram.
    '''
    nproc = kwargs.pop('nproc', 3)
    bandpass = kwargs.pop('bandpass', None)
    fftlen = kwargs.pop('fftlen', 2**8)
    diff = kwargs.pop('diff', False)
    fs = kwargs.pop('fs', 256)
    fname_hdf5 = fname_specgram(start, end, prefix=seis, axis=axis)

    # Load specgram from hdf5 file
    if os.path.exists(fname_hdf5):
        specgram = Spectrogram.read(fname_hdf5, format='hdf5')
        return specgram

    # If no file, make specgram from timeseries data
    try:
        chname = get_seis_chname(start, end, axis=axis, seis=seis)[0]
        fnamelist = existedfilelist(start, end)
        data = TimeSeries.read(fnamelist, chname, nproc=nproc)
        data = data.resample(fs)
        data = data.crop(start, end)
    except:
        log.debug(traceback.format_exc())
        raise ValueError('!!! {0} {1}'.format(start, end))

    # calculate specgram
    specgram = data.spectrogram2(fftlength=fftlen,
                                 overlap=fftlen / 2,
                                 nproc=nproc)
    try:
        # Make sure the output directory (first path components of fname_hdf5) exists
        fname_dir = '/'.join(fname_hdf5.split('/')[:4])
        if not os.path.exists(fname_dir):
            os.makedirs(fname_dir)
        specgram.write(fname_hdf5, format='hdf5', overwrite=True)
        log.debug('Make {0}'.format(fname_hdf5))
    except:
        log.debug(traceback.format_exc())
        raise ValueError('!!!')
    return specgram
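A hedged call sketch for get_spectrogram with hypothetical GPS times; the median spectrum is then taken with gwpy's Spectrogram.percentile.

# Hypothetical 4096-second segment.
start = 1211817600
end = start + 4096
specgram = get_spectrogram(start, end, axis='X', seis='EXV',
                           fftlen=2**8, fs=256, nproc=3)
# Median spectrum over the segment.
median_spectrum = specgram.percentile(50)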
Example no. 3
def save_spectrogram(segmentlist, fftlength=2**10, overlap=2**9, **kwargs):
    '''Calculate spectrograms for every segment in ``segmentlist``.

    Segments that have already been processed (according to ``_check_skip``)
    are skipped.
    '''
    log.debug('Save spectrograms')
    lackofdata = SegmentList()
    prefix = kwargs.pop('prefix', './data')
    write = kwargs.pop('write', True)
    skip = kwargs.pop('skip', False)

    fnames = [fname_png_asd(start, end, prefix) for start, end in segmentlist]
    not_checked = _check_skip(segmentlist, fnames)

    log.debug('{0}(/{1}) are not checked'.format(len(not_checked),
                                                 len(segmentlist)))
    log.debug('Save spectrograms..')
    for i, segment in enumerate(not_checked):
        try:
            #fname = fname_gwf(start,end,prefix)
            fname = existedfilelist(segment[0], segment[1])
            chname = get_seis_chname(segment[0], segment[1])
            # Drop fftlength/overlap so they are not passed to TimeSeriesDict.read
            kwargs.pop('fftlength', None)
            kwargs.pop('overlap', None)
            data = TimeSeriesDict.read(fname, chname, **kwargs)
            data = data.resample(32)
            data = data.crop(segment[0], segment[1])
        except:
            log.debug(traceback.format_exc())
            raise ValueError('No such data {0}'.format(fname))
        # plot
        kwargs['fftlength'] = fftlength
        kwargs['overlap'] = overlap
        sglist = _calc_spectrogram(data, segment, **kwargs)
        #asdlist = [sg.percentile(50) for sg in sglist]
        fname = fname_png_asd(segment[0], segment[1], prefix)
        #plot_asd(asdlist,fname,**kwargs)
        log.debug('{0:03d}/{1:03d} {2} '.format(i, len(segmentlist), fname) +
                  'Plot')
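A sketch of how save_spectrogram might be driven, using gwpy's segment classes and hypothetical GPS boundaries.

from gwpy.segments import Segment, SegmentList

# Two consecutive 4096-second segments (hypothetical GPS times).
segmentlist = SegmentList([Segment(1211817600, 1211821696),
                           Segment(1211821696, 1211825792)])
save_spectrogram(segmentlist, fftlength=2**10, overlap=2**9,
                 prefix='./data', nproc=2)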
Example no. 4
def check(start,
          end,
          plot=False,
          nproc=2,
          cl=0.05,
          tlen=4096,
          sample_rate=16,
          seis='EXV',
          axis='X'):
    ''' Return the data status of a seismometer at the specified time.

    The status is checked in three steps: manually flagged bad segments are
    rejected first, then DAQ/readout failures are caught, and finally the
    data are screened for outliers.

    Parameters
    ----------
    start : `int`
        start GPS time.
    end : `int`
        end GPS time.
    seis : `str`, optional
        name of the seismometer. default is 'EXV'.
    axis : `str`, optional
        axis of the seismometer. default is 'X'.
    nproc : `int`
        number of CPU processes.
    cl : `float`
        confidence level. default is 0.05.
    tlen : `int`
        length of the data segment in seconds. default is 4096.
    sample_rate : `int`
        sample rate of the data in Hz. default is 16.
    plot : `bool`
        if True, plot the timeseries. default is False.

    Returns
    -------
    status : `str`
        status of the data.
    '''
    # Remove bad data manually
    if (start, end) in badsegment:
        return 'BadData'

    # Check DAQ trouble
    try:
        if axis == 'all':
            axes = ['X', 'Y', 'Z']
        elif axis in ['X', 'Y', 'Z']:
            axes = [axis]
        else:
            raise ValueError('Invalid axis {0}'.format(axis))

        data3 = []
        for axis in axes:
            chname = get_seis_chname(start, end, seis=seis, axis=axis)[0]
            fnamelist = existedfilelist(start, end)
            data = TimeSeries.read(fnamelist, chname, nproc=nproc)
            data = data.resample(sample_rate)
            data = data.crop(start, end)
            data = data.detrend('linear')
            data3 += [data]

    except ValueError as e:
        if 'Cannot append discontiguous TimeSeries' in e.args[0]:
            return 'NoData_LackofData'
        elif 'Failed to read' in e.args[0]:
            return 'NoData_NoChannel'
        elif 'array must not contain infs or NaNs' in e.args[0]:
            return 'Nodata_AnyNan'
        elif 'no Fr{Adc,Proc,Sim}Data structures with the name' in e.args[0]:
            return 'Nodata_FailedtoRead'
        elif 'Creation of unknown checksum type' in e.args[0]:
            return 'Nodata_FailedtoRead'
        else:
            log.debug(traceback.format_exc())
            raise ValueError('!!!')
    except IndexError as e:
        if 'cannot read TimeSeries from empty source list' in e.args[0]:
            return 'NoData_Empty'
        else:
            log.debug(traceback.format_exc())
            raise ValueError('!!!')
    except RuntimeError as e:
        if 'Failed to read' in e.args[0]:
            return 'NoData_FailedtoRead'
        elif 'Not a frame file (Invalid FrHeader)' in e.args[0]:
            return 'NoData_FailedtoRead'
        elif 'Missing FrEndOfFile structure' in e.args[0]:
            return 'NoData_FailedtoRead'
        else:
            log.debug(traceback.format_exc())
            raise ValueError('!!!')
    except TypeError as e:
        if 'NoneType' in e.args[0]:
            return 'NoData_NoChannel'
        else:
            log.debug(traceback.format_exc())
            raise ValueError('!!!')
    except IORegistryError as e:
        if 'Format' in e.args[0]:
            return 'NoData_InvalidFormat'
        else:
            log.debug(traceback.format_exc())
            raise ValueError('!!!')
    except:
        log.debug(traceback.format_exc())
        raise ValueError('!!!')

    # Check Outlier
    for data in data3:
        if data.shape[0] != tlen * sample_rate:
            return 'NoData_FewData'
        if data.std().value == 0.0:
            return 'NoData_AllZero'
        if any(data.value == 0.0):
            return 'NoData_AnyZero'
        if any(np.diff(data.value) == 0.0):
            return 'WrongData_AnyConstant'

    # Check the maximum amplitude and plot if requested
    if plot:
        fig = plt.figure(figsize=(19, 12))
        gs = gridspec.GridSpec(len(data3),
                               3,
                               width_ratios=[3, 1, 3],
                               wspace=0.15)

    n = len(data3)
    for i, data in enumerate(data3):
        std = data.std().value
        mean = data.mean().value
        _max = data.abs().max().value
        if _max > mean + std * 5:
            return 'Normal_Reject'
        elif _max > 1000:  #count
            return 'Normal_Reject'
        elif _max < 1:  # count
            return 'Normal_Reject'
        else:
            pass

        if plot:
            ax0 = plt.subplot(gs[i, 0])
            ax1 = plt.subplot(gs[i, 1:2])
            ax2 = plt.subplot(gs[i, 2:])
            ax0.set_ylabel('Counts')
            ax0.plot(data, 'k')
            ax0.hlines(mean, start, end, 'k')
            ax0.set_xscale('auto-gps')
            ax0.set_xlim(start, end)
            ymin, ymax = mean - std * 6, mean + std * 6
            _ymin, _ymax = mean - std * 5, mean + std * 5
            ax0.hlines(_ymin, start, end, 'k')
            ax0.hlines(_ymax, start, end, 'k')
            ax0.hlines(_max, start, end, 'r', linestyle='--')
            ax0.hlines(-1 * _max, start, end, 'r', linestyle='--')
            ax0.set_ylim(ymin, ymax)
            mu, sigma = norm.fit(data.value)
            y = np.linspace(ymin, ymax, 100)
            p = norm.pdf(y, mu, sigma)
            ax1.plot(p, y, 'k', linewidth=2)
            n, bins, patches = ax1.hist(data.value,
                                        50,
                                        density=True,  # matplotlib >= 3 removed 'normed'
                                        facecolor='black',
                                        orientation="horizontal",
                                        alpha=0.50)
            ax1.set_ylim(ymin, ymax)
            ax1.set_xlim(0, 1. / (std * 2))
            ax1.set_xticklabels(np.arange(0.0, 0.15, 0.02), rotation=-90)
            ax1.set_xlabel('Probability Density')
            ax2.set_ylim(ymin, ymax)
    if plot:
        fname = './data/{2}/{0}_{1}.png'.format(start, end, seis)
        log.debug(fname)
        plt.savefig(fname)
        plt.close()

    return 'Normal'
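A hedged example of screening one hypothetical segment with check before computing its spectrogram (get_spectrogram is the function from Example no. 2).

start = 1211817600
end = start + 4096
status = check(start, end, plot=False, nproc=2, seis='EXV', axis='X')
if status == 'Normal':
    specgram = get_spectrogram(start, end, axis='X', seis='EXV')
else:
    log.debug('Skip {0}-{1}: {2}'.format(start, end, status))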
Example no. 5
    # Parse arguments
    import argparse 
    parser = argparse.ArgumentParser(description='description')
    parser.add_argument('dataname',help='data name which you want to calculate')
    args = parser.parse_args()
    dataname = args.dataname

    # Read timeseries data of Trillium120
    from Kozapy.utils import filelist
    from lib.channel import get_seis_chname
    m31 = True  # if True, read a fixed 2**13-second segment starting May 31 2019 instead of the named dataset
    if m31:
        start = tconvert('May31 2019 00:00:00')
        end = start + 2**13
        fname = filelist(start,end)
        chname = get_seis_chname(start,end,place='EXV')
        print(chname)
    else:
        fname = fname_gwf_tr120(dataname)
        chname = frtools.get_channels(fname)        
    try:
        data = TimeSeriesDict.read(fname,chname,**kwargs)
    except:
        print(fname)
        raise ValueError('!')

    exv,ixv,ixv2,eyv = check_channel_name(chname)
    exv = check_data(data,exv)
    ixv = check_data(data,ixv)
    ixv2 = check_data(data,ixv2)
    eyv = check_data(data,eyv)
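The fragment above assumes that kwargs for TimeSeriesDict.read was defined earlier in the script; a minimal, hypothetical setup might look like this.

from gwpy.timeseries import TimeSeriesDict

# Hypothetical read options: parallelize the frame read and force the GWF (lalframe) reader.
kwargs = {'nproc': 4, 'format': 'gwf.lalframe'}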
Example no. 6
    # Build a lookup from dataset name (e.g. 'cd04_01') to a JST date string;
    # the month in the date string is the first index plus two.
    for i in range(4, 9, 1):
        for j in range(1, 32, 1):
            hoge['cd{0:02d}_{1:02d}'.format(i, j)] = \
                '{0:02d}/{1:02d} 2019 00:00:00 JST'.format(i + 2, j)
            hoge['cd{0:02d}_{1:02d}d'.format(i, j)] = \
                '{0:02d}/{1:02d} 2019 12:00:00 JST'.format(i + 2, j)
    #
    try:
        start = tconvert(hoge[dataname])
    except:
        print(traceback.format_exc())
        raise ValueError('Nodata')

    end = start + tlen
    fname = filelist(start, end)
    chname = get_seis_chname(start, end, place='EXV', axis='X')
    chname += get_seis_chname(start, end, place='IXV', axis='X')
    chname += get_seis_chname(start, end, place='IXV', axis='Y')
    chname += get_seis_chname(start, end, place='EYV', axis='Y')
    print(chname)
    try:
        #data = TimeSeriesDict.read(fname,chname,start=start,end=end,**kwargs)
        data = TimeSeriesDict.read(fname, chname, **kwargs)
        data = data.crop(start, end)
    except:
        print(traceback.format_exc())
        raise ValueError('???')

    exv_x, ixv_x, ixv_y, eyv_y = check_channel_name(chname)
    exv_x = check_data(data, exv_x)
    ixv_x = check_data(data, ixv_x)
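For reference, tconvert is what turns the generated JST date strings into GPS start times; a hypothetical spot check of one key (the 'JST' suffix is handled the same way as in the call above).

from gwpy.time import tconvert

# 'cd04_15' was generated above as '06/15 2019 00:00:00 JST'.
print(hoge['cd04_15'])
print(int(tconvert(hoge['cd04_15'])))  # GPS seconds at that time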