Example #1
File: mseed.py Project: shineusn/pyrocko
def save(traces, filename_template, additional={}, overwrite=True):
    from pyrocko import mseed_ext

    fn_tr = {}
    for tr in traces:
        for code, maxlen, val in zip(
            ['network', 'station', 'location', 'channel'], [2, 5, 2, 3],
                tr.nslc_id):

            if len(val) > maxlen:
                raise CodeTooLong(
                    '%s code too long to be stored in MSeed file: %s' %
                    (code, val))

        fn = tr.fill_template(filename_template, **additional)
        if not overwrite and os.path.exists(fn):
            raise FileSaveError('file exists: %s' % fn)

        if fn not in fn_tr:
            fn_tr[fn] = []

        fn_tr[fn].append(tr)

    for fn, traces_thisfile in fn_tr.items():
        trtups = []
        traces_thisfile.sort(key=lambda a: a.full_id)
        for tr in traces_thisfile:
            trtups.append(as_tuple(tr))

        ensuredirs(fn)
        try:
            mseed_ext.store_traces(trtups, fn)
        except mseed_ext.MSeedError as e:
            raise FileSaveError(
                str(e) + ' (while storing traces to file \'%s\')' % fn)
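A minimal usage sketch for the save function above (hypothetical values; assumes pyrocko is installed and this function is reachable as pyrocko.io.save):

import numpy as num
from pyrocko import trace, io

# build a dummy trace; its codes fill the filename template below
tr = trace.Trace(
    network='XX', station='TEST', location='', channel='HHZ',
    tmin=0.0, deltat=0.01, ydata=num.zeros(1000, dtype=num.int32))

# placeholders like %(network)s are filled per trace by fill_template
io.save([tr], 'out/%(network)s.%(station)s.%(channel)s.mseed')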
Example #2
def save(traces, filename_template, additional={}):
    from pyrocko import mseed_ext
            
    fn_tr = {}
    for tr in traces:
        for code, maxlen, val in zip(
                ['network', 'station', 'location', 'channel'],
                [2, 5, 2, 3],
                tr.nslc_id):

            if len(val) > maxlen:
                raise CodeTooLong(
                        '%s code too long to be stored in MSeed file: %s' % 
                        (code, val))

        fn = tr.fill_template(filename_template, **additional)
        if fn not in fn_tr:
            fn_tr[fn] = []
        
        fn_tr[fn].append(tr)
        
    for fn, traces_thisfile in fn_tr.items():
        trtups = []
        traces_thisfile.sort(key=lambda a: a.full_id)
        for tr in traces_thisfile:
            trtups.append(as_tuple(tr))
        
        ensuredirs(fn)
        try:
            mseed_ext.store_traces(trtups, fn)
        except mseed_ext.MSeedError as e:
            raise FileSaveError(
                str(e) + ' (while storing traces to file \'%s\')' % fn)
Example #3
    def available_tilenames(self):
        if self._available_tilenames is None:
            fpath = op.join(self.data_dir, 'available.list')
            if not op.exists(fpath) or os.stat(fpath).st_size == 0:
                util.ensuredirs(fpath)
                # remote structure changed, we would have to crawl through
                # many pages. Now keeping the tile index here:
                self.download_file(
                    'https://mirror.pyrocko.org/e4ftl01.cr.usgs.gov/'
                    'MEASURES/SRTMGL3.003/2000.02.11/available.list', fpath)

                # url = self.raw_data_url + '/'
                # f = urlopen(url)
                # data = f.read().decode()
                # available = re.findall(
                #     r'([NS]\d\d[EW]\d\d\d)\.SRTMGL3\.hgt', data)
                #
                # f.close()
                #
                # with open(fpath, 'w') as f:
                #     f.writelines('%s\n' % s for s in available)

            with open(fpath, 'r') as f:
                available = [
                    s.strip() for s in f.readlines()
                    if re.match(r'^[NS]\d\d[EW]\d\d\d$', s.strip())
                ]

            self._available_tilenames = set(available)

        return self._available_tilenames
Example #4
File: srtmgl3.py Project: emolch/pyrocko
    def available_tilenames(self):
        if self._available_tilenames is None:
            fpath = op.join(self.data_dir, 'available.list')
            if not op.exists(fpath) or os.stat(fpath).st_size == 0:
                util.ensuredirs(fpath)
                # remote structure changed, we would have to crawl through
                # many pages. Now keeping the tile index here:
                self.download_file(
                    'http://data.pyrocko.org/scratch/available.list', fpath)

                # url = self.raw_data_url + '/'
                # f = urlopen(url)
                # data = f.read().decode()
                # available = re.findall(
                #     r'([NS]\d\d[EW]\d\d\d)\.SRTMGL3\.hgt', data)
                #
                # f.close()
                #
                # with open(fpath, 'w') as f:
                #     f.writelines('%s\n' % s for s in available)

            with open(fpath, 'r') as f:
                available = [
                    s.strip() for s in f.readlines()
                    if re.match(r'^[NS]\d\d[EW]\d\d\d$', s.strip())]

            self._available_tilenames = set(available)

        return self._available_tilenames
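For reference, the [NS]\d\d[EW]\d\d\d pattern above names 1x1 degree SRTM tiles after the integer coordinates of their south-west corner. A small illustrative helper (not part of pyrocko's API) that produces such names:

import math

def tilename(lat, lon):
    # e.g. tilename(52.5, 13.4) -> 'N52E013' (tile's SW corner)
    return '%s%02i%s%03i' % (
        'SN'[lat >= 0.], abs(int(math.floor(lat))),
        'WE'[lon >= 0.], abs(int(math.floor(lon))))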
Example #5
def save(traces, filename_template, additional={}, max_open_files=10):
    fns = set()
    open_files = {}
    
    def close_files():
        while open_files:
            open_files.popitem()[1].close()
            
    for tr in traces:
        fn = tr.fill_template(filename_template, **additional)
        
        if fn not in open_files:
            if len(open_files) >= max_open_files:
                close_files()
                
            if fn not in fns: 
                ensuredirs(fn)
            
            # 'wa'[bool]: 'w' the first time a file is opened, 'a' afterwards
            open_files[fn] = open(fn, 'wa'[fn in fns])
            fns.add(fn)
        
        tf = TracesFileIO(open_files[fn])
        tf.save([tr])
        tf.close()
        
    close_files()
            
    return list(fns)
Example #6
def save(traces, filename_template, additional={}, max_open_files=10,
         overwrite=True):

    fns = set()
    open_files = {}

    def close_files():
        while open_files:
            open_files.popitem()[1].close()

    for tr in traces:
        fn = tr.fill_template(filename_template, **additional)

        if fn not in open_files:
            if len(open_files) >= max_open_files:
                close_files()

            if fn not in fns:
                if not overwrite and os.path.exists(fn):
                    raise FileSaveError('file exists: %s' % fn)

                ensuredirs(fn)

            open_files[fn] = open(fn, 'wa'[fn in fns])
            fns.add(fn)

        tf = TracesFileIO(open_files[fn])
        tf.save([tr])
        tf.close()

    close_files()

    return list(fns)
Example #7
    def create_grond_files(self):
        logger.info('Creating Grond configuration for %s' %
                    ' and '.join([obs.name for obs in self.observations]))

        config_path = op.join(self.project_dir, self.get_grond_config_path())
        util.ensuredirs(config_path)
        grond.write_config(self.get_grond_config(), config_path)
Example #8
    def create_group_automap(self, config, iter_item_figure, **kwargs):
        group = PlotGroup(
            formats=guts.clone(config.formats),
            size_cm=config.size_cm,
            name=config.name,
            variant=config.variant,
            **kwargs)

        path_group = self.path_group(group=group)
        if os.path.exists(path_group):
            self.remove_group_files(path_group)

        group_ref = (group.name, group.variant)
        if group_ref in self._collection.group_refs:
            self._collection.group_refs.remove(group_ref)

        self.dump_collection()

        for item, automap in iter_item_figure:
            group.items.append(item)
            for format in group.formats:
                path = self.path_image(group, item, format)
                util.ensuredirs(path)
                format.render_automap(
                    automap,
                    path=path,
                    resolution=format.get_dpi(group.size_cm))

                logger.info('Figure saved: %s' % path)

        util.ensuredirs(path_group)
        group.dump(filename=path_group)
        self._collection.group_refs.append(group_ref)
        self.dump_collection()
Example #9
File: prepare.py Project: woxin5295/kiwi
def save_rapid_station_table(stations_path, stations):
    '''Save station table in format for rapidinv'''
    util.ensuredirs(stations_path)
    f = open(stations_path, 'w')
    for istation, station in enumerate(stations):
        f.write('%4i %-10s %15.8e %15.8e\n' %
                (istation, station.station, station.lat, station.lon))
    f.close()
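The same writer as a context-manager sketch, so the file is closed even if a write raises (output format unchanged):

def save_rapid_station_table(stations_path, stations):
    '''Save station table in format for rapidinv'''
    util.ensuredirs(stations_path)
    with open(stations_path, 'w') as f:
        for istation, station in enumerate(stations):
            f.write('%4i %-10s %15.8e %15.8e\n' %
                    (istation, station.station, station.lat, station.lon))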
Example #10
File: fomosto.py Project: emolch/pyrocko
def command_tttextract(args):
    def setup(parser):
        parser.add_option(
            '--output', dest='output_fn', metavar='TEMPLATE',
            help='output to text files instead of stdout '
                 '(example TEMPLATE: "extracted/%(args)s.txt")')

    parser, options, args = cl_parse('tttextract', args, setup=setup)
    try:
        sdef = args.pop()
    except Exception:
        parser.error('cannot get <selection> argument')

    try:
        sphase = args.pop()
    except Exception:
        parser.error('cannot get <phase> argument')

    try:
        phases = [gf.meta.Timing(x.strip()) for x in sphase.split(',')]
    except gf.meta.InvalidTimingSpecification:
        parser.error('invalid phase specification: "%s"' % sphase)

    try:
        gdef = gf.meta.parse_grid_spec(sdef)
    except gf.meta.GridSpecError as e:
        die(e)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        for args in store.config.iter_extraction(gdef, level=-1):
            s = ['%e' % x for x in args]
            for phase in phases:
                t = store.t(phase, args)
                if t is not None:
                    s.append('%e' % t)
                else:
                    s.append('nan')

            if options.output_fn:
                d = dict(
                    args='_'.join('%e' % x for x in args),
                    extension='txt')

                fn = options.output_fn % d
                util.ensuredirs(fn)
                with open(fn, 'a') as f:
                    f.write(' '.join(s))
                    f.write('\n')
            else:
                print(' '.join(s))

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)
Example #11
File: gse2.py Project: wuxyair/pyrocko
def save(traces,
         filename_template,
         additional={},
         max_open_files=10,
         overwrite=True):

    from pyrocko import info
    from . import ims

    fns = set()
    open_files = {}

    def close_files():
        while open_files:
            open_files.popitem()[1].close()

    for tr in traces:
        fn = tr.fill_template(filename_template, **additional)
        if fn not in open_files:
            if len(open_files) >= max_open_files:
                close_files()

            if fn not in fns:
                if not overwrite and os.path.exists(fn):
                    raise FileSaveError('file exists: %s' % fn)

                ensuredirs(fn)

            open_files[fn] = open(fn, ['wb', 'ab'][fn in fns])
            writer = ims.Writer(open_files[fn])
            writer.write(
                ims.MessageHeader(version='GSE2.1',
                                  type='DATA',
                                  msg_id=ims.MsgID(msg_id_string=randomid(),
                                                   msg_id_source='Pyrocko_%s' %
                                                   info.version)))

            writer.write(
                ims.WaveformSection(
                    datatype=ims.DataType(type='WAVEFORM', format='GSE2.1')))

            fns.add(fn)

        sec = ims.WID2Section.from_pyrocko_trace(tr, None, None, None, None)
        writer = ims.Writer(open_files[fn])
        writer.write(sec)

    for fn in fns:
        if fn not in open_files:
            open_files[fn] = open(fn, 'ab')

        writer = ims.Writer(open_files[fn])
        writer.write(ims.Stop())
        open_files.pop(fn).close()

    return list(fns)
Example #12
def command_tttextract(args):
    def setup(parser):
        parser.add_option('--output',
                          dest='output_fn',
                          metavar='TEMPLATE',
                          help='output to text files instead of stdout '
                          '(example TEMPLATE: "extracted/%(args)s.txt")')

    parser, options, args = cl_parse('tttextract', args, setup=setup)
    try:
        sdef = args.pop()
    except Exception:
        parser.error('cannot get <selection> argument')

    try:
        sphase = args.pop()
    except Exception:
        parser.error('cannot get <phase> argument')

    try:
        phases = [gf.meta.Timing(x.strip()) for x in sphase.split(',')]
    except gf.meta.InvalidTimingSpecification:
        parser.error('invalid phase specification: "%s"' % sphase)

    try:
        gdef = gf.meta.parse_grid_spec(sdef)
    except gf.meta.GridSpecError as e:
        die(e)

    store_dir = get_store_dir(args)

    try:
        store = gf.Store(store_dir)
        for args in store.config.iter_extraction(gdef, level=-1):
            s = ['%e' % x for x in args]
            for phase in phases:
                t = store.t(phase, args)
                if t is not None:
                    s.append('%e' % t)
                else:
                    s.append('nan')

            if options.output_fn:
                d = dict(args='_'.join('%e' % x for x in args),
                         extension='txt')

                fn = options.output_fn % d
                util.ensuredirs(fn)
                # note: 'w' truncates on each iteration, so each output file
                # keeps only its last row; the variant in Example #10 appends
                with open(fn, 'w') as f:
                    f.write(' '.join(s))
                    f.write('\n')
            else:
                print(' '.join(s))

    except (gf.meta.GridSpecError, gf.StoreError, gf.meta.OutOfBounds) as e:
        die(e)
Example #13
def get_test_data(path):
    fn = test_data_path(path)
    if not op.exists(fn):
        util.ensuredirs(fn)
        if path.endswith('/'):
            util.download_dir(op.join(url, path), fn)
        else:
            util.download_file(op.join(url, path), fn)

    return fn
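A hedged usage sketch (url and test_data_path are module-level names in the original test helpers; these paths are made up):

fn = get_test_data('events/example_event.txt')    # fetch a single file
dn = get_test_data('gf_stores/example_store/')    # trailing '/': directory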
Example #14
File: iris_pull.py Project: simondaout/bat
def combi_get_responses(stations, time, fn_template):
    from pyrocko.fdsn import ws
    from pyrocko.fdsn import station as fdsnstation

    def fn(net, sta, loc, cha):
        return fn_template % dict(
            network=net, station=sta, location=loc, channel=cha)

    def iter_nslcs(site=None, ignore=None):
        for station in stations:
            if site is not None and site not in station.datacenters:
                continue

            for channel in station.get_channels():
                nslc = station.nsl() + (channel.name, )
                if ignore is None or nslc not in ignore:
                    yield nslc

    responses = {}
    for nslc in iter_nslcs():
        if os.path.exists(fn(*nslc)):
            responses[nslc] = pload(fn(*nslc))

    for site in ['geofon', 'iris']:
        selection = []
        for nslc in iter_nslcs(site=site, ignore=responses):
            selection.append(nslc + (time, time + 1.0))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs = ws.station(site=site, level='response', selection=selection)

            for nslc_tspan in selection:
                nslc = nslc_tspan[:4]
                timespan = nslc_tspan[4:]
                try:
                    response = sxs.get_pyrocko_response(nslc,
                                                        timespan=timespan,
                                                        fake_input_units='M')

                    util.ensuredirs(fn(*nslc))
                    pdump(response, fn(*nslc))
                    responses[nslc] = response

                except (fdsnstation.NoResponseInformation,
                        fdsnstation.MultipleResponseInformation):
                    pass

    for station in stations:
        for channel in station.get_channels():
            nslc = station.nsl() + (channel.name, )
            if nslc in responses:
                channel.response = responses[nslc]
            else:
                channel.response = None
Example #15
def save(traces, filename_template, additional={}, max_open_files=10,
         overwrite=True):

    from pyrocko import ims, info

    fns = set()
    open_files = {}

    def close_files():
        while open_files:
            open_files.popitem()[1].close()

    for tr in traces:
        fn = tr.fill_template(filename_template, **additional)
        if fn not in open_files:
            if len(open_files) >= max_open_files:
                close_files()

            if fn not in fns:
                if not overwrite and os.path.exists(fn):
                    raise FileSaveError('file exists: %s' % fn)

                ensuredirs(fn)

            open_files[fn] = open(fn, 'wa'[fn in fns])
            writer = ims.Writer(open_files[fn])
            writer.write(
                ims.MessageHeader(
                    version='GSE2.1',
                    type='DATA',
                    msg_id=ims.MsgID(
                        msg_id_string=randomid(),
                        msg_id_source='Pyrocko_%s' % info.version)))

            writer.write(ims.WaveformSection(
                datatype=ims.DataType(
                    type='WAVEFORM',
                    format='GSE2.1')))

            fns.add(fn)

        sec = ims.WID2Section.from_pyrocko_trace(tr, None, None, None, None)
        writer = ims.Writer(open_files[fn])
        writer.write(sec)

    for fn in fns:
        if fn not in open_files:
            open_files[fn] = open(fn, 'a')

        writer = ims.Writer(open_files[fn])
        writer.write(ims.Stop())
        open_files.pop(fn).close()

    return list(fns)
Example #16
    def make_tile(self, itx, ity, fpath):
        nh = self.ndeci // 2
        xmin = self.xmin + itx*self.stx - self.base.dx * nh
        xmax = self.xmin + (itx+1)*self.stx + self.base.dx * nh
        ymin = self.ymin + ity*self.sty - self.base.dy * nh
        ymax = self.ymin + (ity+1)*self.sty + self.base.dy * nh

        t = self.base.get_with_repeat((xmin, xmax, ymin, ymax))
        if t is not None:
            t.decimate(self.ndeci)

        util.ensuredirs(fpath)
        # tile data is raw binary (ndarray.tofile); open 'wb' for portability
        with open(fpath, 'wb') as f:
            if t is not None:
                t.data.tofile(f)
Example #17
    def make_tile(self, itx, ity, fpath):
        nh = self.ndeci // 2
        xmin = self.xmin + itx * self.stx - self.base.dx * nh
        xmax = self.xmin + (itx + 1) * self.stx + self.base.dx * nh
        ymin = self.ymin + ity * self.sty - self.base.dy * nh
        ymax = self.ymin + (ity + 1) * self.sty + self.base.dy * nh

        t = self.base.get_with_repeat((xmin, xmax, ymin, ymax))
        if t is not None:
            t.decimate(self.ndeci)

        util.ensuredirs(fpath)
        # tile data is raw binary (ndarray.tofile); open 'wb' for portability
        with open(fpath, 'wb') as f:
            if t is not None:
                t.data.tofile(f)
Example #18
    def create_group_mpl(self, config, iter_item_figure, **kwargs):
        from matplotlib import pyplot as plt
        group = PlotGroup(
            formats=guts.clone(config.formats),
            size_cm=config.size_cm,
            name=config.name,
            variant=config.variant,
            **kwargs)

        path_group = self.path_group(group=group)
        if os.path.exists(path_group):
            self.remove_group_files(path_group)

        group_ref = (group.name, group.variant)
        if group_ref in self._collection.group_refs:
            self._collection.group_refs.remove(group_ref)

        self.dump_collection()

        figs_to_close = []
        for item, fig in iter_item_figure:
            group.items.append(item)
            for format in group.formats:
                path = self.path_image(group, item, format)
                util.ensuredirs(path)
                format.render_mpl(
                    fig,
                    path=path,
                    dpi=format.get_dpi(group.size_cm))

                logger.info('Figure saved: %s' % path)

            if not self._show:
                plt.close(fig)
            else:
                figs_to_close.append(fig)

        util.ensuredirs(path_group)
        group.validate()
        group.dump(filename=path_group)
        self._collection.group_refs.append(group_ref)
        self.dump_collection()

        if self._show:
            plt.show()

        for fig in figs_to_close:
            plt.close(fig)
Example #19
def save(traces,
         filename_template,
         additional={},
         overwrite=True,
         steim=1,
         record_length=4096):
    from pyrocko import mseed_ext

    assert record_length in VALID_RECORD_LENGTHS

    fn_tr = {}
    for tr in traces:
        for code, maxlen, val in zip(
            ['network', 'station', 'location', 'channel'], [2, 5, 2, 3],
                tr.nslc_id):

            if len(val) > maxlen:
                raise CodeTooLong(
                    '%s code too long to be stored in MSeed file: %s' %
                    (code, val))

        fn = tr.fill_template(filename_template, **additional)
        if not overwrite and os.path.exists(fn):
            raise FileSaveError('File exists: %s' % fn)

        if fn not in fn_tr:
            fn_tr[fn] = []

        fn_tr[fn].append(tr)

    for fn, traces_thisfile in fn_tr.items():
        trtups = []
        traces_thisfile.sort(key=lambda a: a.full_id)
        for tr in traces_thisfile:
            trtups.append(as_tuple(tr))

        ensuredirs(fn)
        try:
            mseed_ext.store_traces(trtups,
                                   fn,
                                   record_length=record_length,
                                   steim=steim)
        except mseed_ext.MSeedError as e:
            raise FileSaveError(
                str(e) + ' (while storing traces to file \'%s\')' % fn)

    return list(fn_tr.keys())
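A call sketch for the variant above: steim=2 selects STEIM-2 compression, and record_length must be a member of the module-level VALID_RECORD_LENGTHS (512 is assumed valid here):

fns = save(traces, 'out/%(network)s.%(station)s.%(channel)s.mseed',
           steim=2, record_length=512)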
Example #20
    def create_scenario(self,
                        force=False,
                        interactive=True,
                        gf_store_superdirs=None,
                        make_map=True):

        logger.info('Creating scenario...')

        scenario = self.get_scenario()
        self.create_project_dir(force)
        util.ensuredir(self.get_gf_stores_dir())

        data_dir = op.join(self.project_dir, self.data_dir)
        util.ensuredir(data_dir)

        scenario.dump(filename=op.join(data_dir, 'scenario.yml'))

        if gf_store_superdirs is None:
            engine1 = gf.LocalEngine(
                use_config=True, store_superdirs=[self.get_gf_stores_dir()])
        else:
            engine1 = gf.LocalEngine(use_config=False,
                                     store_superdirs=gf_store_superdirs)

        scenario.init_modelling(engine=engine1)

        scenario.ensure_gfstores(interactive=interactive)
        self.symlink_gfstores(engine1)

        engine2 = gf.LocalEngine(use_config=False,
                                 store_superdirs=[self.get_gf_stores_dir()])

        scenario.init_modelling(engine=engine2)

        scenario.dump_data(path=data_dir)
        if make_map:
            scenario.make_map(op.join(self.project_dir, 'scenario_map.pdf'))

        shutil.move(op.join(data_dir, 'sources.yml'),
                    op.join(data_dir, 'scenario_sources.yml'))

        markers = scenario.get_onsets()
        marker_path = op.join(data_dir, 'picks', 'picks.markers')
        if markers:
            util.ensuredirs(marker_path)
            pmarker.save_markers(markers, marker_path)
Example #21
def download_file(fn, dirpath):
    url = base_url + '/' + fn
    fpath = op.join(dirpath, fn)
    logger.info('starting download of %s' % url)

    util.ensuredirs(fpath)
    f = urlopen(url)
    # write to a temporary file first; the final os.rename() is atomic, so
    # readers never see a partially downloaded file
    fpath_tmp = fpath + '.%i.temp' % os.getpid()
    g = open(fpath_tmp, 'wb')
    while True:
        data = f.read(1024)
        if not data:
            break
        g.write(data)

    g.close()
    f.close()

    os.rename(fpath_tmp, fpath)

    logger.info('finished download of %s' % url)
Example #22
def download_file(fn, dirpath):
    import urllib2
    url = base_url + '/' + fn
    fpath = op.join(dirpath, fn)
    logger.info('starting download of %s' % url)

    util.ensuredirs(fpath)
    f = urllib2.urlopen(url)
    fpath_tmp = fpath + '.%i.temp' % os.getpid()
    g = open(fpath_tmp, 'wb')
    while True:
        data = f.read(1024)
        if not data:
            break
        g.write(data)

    g.close()
    f.close()

    os.rename(fpath_tmp, fpath)

    logger.info('finished download of %s' % url)
Example #23
def iris_get_responses(stations, time, fn_template):
    for sta in stations:
        fn = fn_template % dict(
            network=sta.network, station=sta.station, location=sta.location)
        if not os.path.isfile(fn):
            try:
                fi = iris_ws.ws_resp(sta.network,
                                     sta.station,
                                     sta.location,
                                     '*',
                                     time=time)

                util.ensuredirs(fn)
                fo = open(fn, 'w')
                while True:
                    data = fi.read(1024)
                    if not data:
                        break
                    fo.write(data)

                fo.close()
                fi.close()
            except iris_ws.NotFound:
                pass

        for cha in sta.get_channels():

            class DummyTrace:
                pass

            tr = DummyTrace()
            tr.tmin = time
            tr.tmax = time
            tr.nslc_id = (sta.network, sta.station, sta.location, cha.name)
            if os.path.exists(fn):
                cha.inv_response = trace.InverseEvalresp(fn, tr)
            else:
                cha.inv_response = None
Example #24
    def available_tilenames(self):
        if self._available_tilenames is None:
            fpath = op.join(self.data_dir, 'available.list')
            if op.exists(fpath):
                with open(fpath, 'r') as f:
                    available = [s.strip() for s in f.readlines()]

            else:
                url = self.raw_data_url + '/'
                f = urllib2.urlopen(url)
                data = f.read()
                available = re.findall(
                    r'([NS]\d\d[EW]\d\d\d)\.SRTMGL3\.hgt', data)

                f.close()

                util.ensuredirs(fpath)
                with open(fpath, 'w') as f:
                    f.writelines('%s\n' % s for s in available)

            self._available_tilenames = set(available)

        return self._available_tilenames
Example #25
    def available_tilenames(self):
        if self._available_tilenames is None:
            fpath = op.join(self.data_dir, 'available.list')
            if op.exists(fpath):
                with open(fpath, 'r') as f:
                    available = [s.strip() for s in f.readlines()]

            else:
                url = self.raw_data_url + '/'
                f = urlopen(url)
                data = f.read().decode()
                available = re.findall(r'([NS]\d\d[EW]\d\d\d)\.SRTMGL3\.hgt',
                                       data)

                f.close()

                util.ensuredirs(fpath)
                with open(fpath, 'w') as f:
                    f.writelines('%s\n' % s for s in available)

            self._available_tilenames = set(available)

        return self._available_tilenames
Example #26
def save(traces,
         filename_template,
         additional={},
         max_open_files=10,
         overwrite=True):

    fns = set()
    open_files = {}

    def close_files():
        while open_files:
            open_files.popitem()[1].close()

    for tr in traces:
        fn = tr.fill_template(filename_template, **additional)

        if fn not in open_files:
            if len(open_files) >= max_open_files:
                close_files()

            if fn not in fns:
                if not overwrite and os.path.exists(fn):
                    raise FileSaveError('file exists: %s' % fn)

                ensuredirs(fn)

            open_files[fn] = open(fn, 'wa'[fn in fns])
            fns.add(fn)

        tf = TracesFileIO(open_files[fn])
        tf.save([tr])
        tf.close()

    close_files()

    return list(fns)
Example #27
def save(traces,
         filename_template,
         format='mseed',
         additional={},
         stations=None,
         overwrite=True):
    '''Save traces to file(s).

    :param traces: a trace or an iterable of traces to store
    :param filename_template: filename template with placeholders for trace
            metadata. Uses normal python '%%(placeholder)s' string templates.
            The following placeholders are considered: ``network``,
            ``station``, ``location``, ``channel``, ``tmin``
            (time of first sample), ``tmax`` (time of last sample),
            ``tmin_ms``, ``tmax_ms``, ``tmin_us``, ``tmax_us``. The versions
            with '_ms' include milliseconds, the versions with '_us' include
            microseconds.
    :param format: %s
    :param additional: dict with custom template placeholder fillins.
    :param overwrite: if ``False``, raise an exception if the file exists
    :returns: list of generated filenames

    .. note::
        Network, station, location, and channel codes may be silently
        truncated to file-format-specific maximum lengths.
    '''

    if isinstance(traces, trace.Trace):
        traces = [traces]

    if format == 'from_extension':
        format = os.path.splitext(filename_template)[1][1:]

    if format == 'mseed':
        return mseed.save(traces,
                          filename_template,
                          additional,
                          overwrite=overwrite)

    elif format == 'gse2':
        return gse2_io_wrap.save(traces,
                                 filename_template,
                                 additional,
                                 overwrite=overwrite)

    elif format == 'sac':
        fns = []
        for tr in traces:
            fn = tr.fill_template(filename_template, **additional)
            if not overwrite and os.path.exists(fn):
                raise FileSaveError('file exists: %s' % fn)

            util.ensuredirs(fn)

            f = sac.SacFile(from_trace=tr)
            if stations:
                s = stations[tr.network, tr.station, tr.location]
                f.stla = s.lat
                f.stlo = s.lon
                f.stel = s.elevation
                f.stdp = s.depth
                f.cmpinc = s.get_channel(tr.channel).dip + 90.
                f.cmpaz = s.get_channel(tr.channel).azimuth

            f.write(fn)
            fns.append(fn)

        return fns

    elif format == 'text':
        fns = []
        for tr in traces:
            fn = tr.fill_template(filename_template, **additional)
            if not overwrite and os.path.exists(fn):
                raise FileSaveError('file exists: %s' % fn)

            util.ensuredirs(fn)
            x, y = tr.get_xdata(), tr.get_ydata()
            num.savetxt(fn, num.transpose((x, y)))
            fns.append(fn)
        return fns

    elif format == 'yaff':
        return yaff.save(traces,
                         filename_template,
                         additional,
                         overwrite=overwrite)
    else:
        raise UnsupportedFormat(format)
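A short usage sketch of the dispatcher above (assumes traces is a list of pyrocko traces):

# explicit format selection
save(traces, 'out/%(station)s.%(channel)s.txt', format='text')

# derive the writer from the extension; refuse to clobber existing files
save(traces, 'out/%(station)s.%(channel)s.mseed',
     format='from_extension', overwrite=False)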
Example #28
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an interruption')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add station selection file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='extend traces by zero-padding if clean restitution requires '
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --local-stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except:
        raise
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # using first site with this station
                    nsl_to_station[nsl] = s

        logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / int(channel.sample_rate.value)
                        except:
                            deltat = 1.0 / int(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        # apparently the WS are only sensitive to full seconds
                        # round to avoid gaps, increase safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                for (net, sta, loc, cha, tmin, tmax) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn('an error occurred while downloading data '
                                    'for channels \n  %s' %
                                    '\n  '.join('.'.join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.  # fixed output window length [s]
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % tr)
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warning('failed to restitute trace %s.%s.%s.%s (%s)' %
                               (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
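The restitution above hinges on Trace.transfer with a frequency-domain taper. A minimal sketch of that call pattern, with made-up corner frequencies; tr is a raw trace and resp a response obtained via get_pyrocko_response() as in the example:

fmin, fmax = 0.01, 10.0                      # illustrative corners [Hz]
ftap = (0.5 * fmin, fmin, fmax, 1.5 * fmax)  # frequency taper (f1, f2, f3, f4)
tfade = 1.0 / fmin                           # time-domain fade length [s]
rest_tr = tr.transfer(tfade, ftap, resp, invert=True)  # deconvolve response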
Example #29
def link_test_data(path, dest=None):
    if dest is None:
        dest = path
    fn = get_test_data(path)
    util.ensuredirs(dest)
    os.symlink(fn, dest.rstrip(os.sep))
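Note that util.ensuredirs creates the missing parent directories of a file path, while util.ensuredir (no trailing 's') creates the given directory itself. A quick sketch with hypothetical paths:

from pyrocko import util

util.ensuredirs('out/run1/stations.txt')  # creates out/run1/, not the file
util.ensuredir('out/run1/raw')            # creates out/run1/raw itself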
Example #30
File: prepare.py Project: woxin5295/kiwi
def save_kiwi_dataset(acc, stations, traces, event, config):
    
    if config.has('data_dir'):
        data_dir = config.path('data_dir')
        if os.path.exists(data_dir):
            shutil.rmtree(data_dir)
    
    if config.has('skeleton_dir'): 
        copy_files(config.path('skeleton_dir'), config.path('main_dir'))

    if config.has('raw_trace_path'):
        trace_selector = None
        if config.has('station_filter'):
            def trace_selector(tr):
                return (get_nsl(tr) in stations and
                        config.station_filter(stations[get_nsl(tr)]))
        
        for raw_traces in acc.iter_traces(trace_selector=trace_selector):
            io.save(raw_traces, config.path('raw_trace_path'))
    
    eventfn = config.path('event_info_path')
    util.ensuredirs(eventfn)
    save_event_info_file(eventfn, event)
    
    dstations = sorted(stations.values(), key=lambda s: s.dist_m)
    
    # gather traces by station
    dataset = []
    used_channels = {}
    for station in dstations:
        station_traces = []
        for tr in traces:
            if get_nsl(tr) == get_nsl(station):
                if tr.channel in config.wanted_channels:
                    station_traces.append(tr)
                    if get_nsl(tr) not in used_channels:
                        used_channels[get_nsl(tr)] = []
                    
                    used_channels[get_nsl(tr)].append(station.get_channel(tr.channel))
                        
        station_traces.sort(key=lambda tr: tr.channel)
        kiwi_components = ''
        for tr in station_traces:
            kiwi_components += config.kiwi_component_map[tr.channel]
        if station_traces:
            dataset.append((station, kiwi_components, station_traces))
    
    if config.has('stations_path'):
        fpath = config.path('stations_path')
        util.ensuredirs(fpath)
        ddstations = copy.deepcopy(dstations)
        for sta in ddstations:
            if get_nsl(sta) in used_channels:
                sta.set_channels(used_channels[get_nsl(sta)])
        model.dump_stations(ddstations, fpath)
    
    if config.has('receivers_path'):
        fpath = config.path('receivers_path')
        util.ensuredirs(fpath)
        f = open(fpath, 'w')
        
    iref = 1
    nsets = 1
    if config.has('nsets'):
        nsets = config.nsets
    for station, components, traces in dataset:
        nsl = '.'.join((get_nsl(station)))
        for i in range(nsets):
            depth = 0.0
            if station.depth is not None:
                depth = station.depth
            if config.has('receivers_path'):
                f.write('%15.8e %15.8e %15.8e %3s %-15s\n' % (station.lat, station.lon, depth, components, nsl) )
            for tr in traces:
                tr = tr.copy()
                if config.trace_time_zero == 'event':
                    tr.shift(-event.time)
                ydata = tr.get_ydata()
                ydata *= config.trace_factor
                fn = config.path('displacement_trace_path', {
                    'ireceiver': iref, 
                    'component': config.kiwi_component_map[tr.channel],
                    'network': tr.network,
                    'station': tr.station,
                    'location': tr.location,
                    'channel': tr.channel})
                io.save([tr], fn)
                
            iref += 1
    if config.has('receivers_path'):
        f.close()
    
    if config.has('reference_time_path'):
        fpath = config.path('reference_time_path')
        f = open(fpath, 'w')
        f.write('%i %s\n' % (event.time, 
                        time.strftime('%Y/%m/%d %H:%M:%S', 
                                        time.gmtime(event.time))))
        f.close()
    
    if config.has('source_origin_path'):
        fpath = config.path('source_origin_path')
        f = open(fpath, 'w')
        t = 0.
        if config.trace_time_zero == 'system':
            t = event.time

        f.write('%e %e %f\n' % (event.lat, event.lon, t))
        f.close()
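For reference, each line of the receivers file written above is fixed-width: latitude, longitude, depth, Kiwi component codes and the dotted network.station.location string. Illustrative values only:

line = '%15.8e %15.8e %15.8e %3s %-15s\n' % (
    52.0, 13.3, 0.0, 'une', 'GR.BFO.')  # made-up station, not from the example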
Example #31
def write_config(conf):
    conf_path = expand(conf_path_tmpl)
    util.ensuredirs(conf_path)
    dump(conf, filename=conf_path)
Example #32
File: config.py Project: shineusn/pyrocko
def write_config(conf, config_name='config'):
    conf_path = expand(make_conf_path_tmpl(config_name))
    util.ensuredirs(conf_path)
    dump(conf, filename=conf_path)
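expand and make_conf_path_tmpl are project-specific helpers; the underlying pattern is a guts dump/load round-trip to a YAML file. A hedged sketch with a hypothetical config class and path:

import os
from pyrocko import util
from pyrocko.guts import Object, String, dump, load

class ExampleConfig(Object):  # hypothetical stand-in for the real config class
    cache_dir = String.T(default='~/.cache/example')

conf_path = os.path.expanduser('~/.config/example/config.yaml')  # hypothetical
util.ensuredirs(conf_path)
dump(ExampleConfig(), filename=conf_path)
conf = load(filename=conf_path)  # reads the YAML back into an ExampleConfig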
Example #33
    def get_table(self, grid, receivers):
        distances = grid.lateral_distances(receivers)
        r_depths = num.array([r.z for r in receivers], dtype=float)
        s_depths = grid.depths()
        x_bounds = num.array(
            [[num.min(r_depths), num.max(r_depths)],
             [num.min(s_depths), num.max(s_depths)],
             [num.min(distances), num.max(distances)]],
            dtype=float)

        x_tolerance = num.array((grid.dz / 2., grid.dz / 2., grid.dx / 2.))
        t_tolerance = grid.max_delta() / (self.get_vmin() * 5.)
        earthmodel = self.get_earthmodel()

        interpolated_tts = {}

        for phase_def in self._tabulated_phases:
            ttt_hash = self.ttt_hash(earthmodel, phase_def.phases, x_bounds,
                                     x_tolerance, t_tolerance)

            fpath = self.ttt_path(ttt_hash)

            if not op.exists(fpath):

                def evaluate(args):
                    receiver_depth, source_depth, x = args
                    t = []
                    rays = earthmodel.arrivals(phases=phase_def.phases,
                                               distances=[x * cake.m2d],
                                               zstart=source_depth,
                                               zstop=receiver_depth)

                    for ray in rays:
                        t.append(ray.t)

                    if t:
                        return min(t)
                    else:
                        return None

                logger.info('prepare tabulated phases: %s [%s]' %
                            (phase_def.id, ttt_hash))

                sptree = spit.SPTree(f=evaluate,
                                     ftol=t_tolerance,
                                     xbounds=x_bounds,
                                     xtols=x_tolerance)

                util.ensuredirs(fpath)
                sptree.dump(filename=fpath)
            else:
                sptree = spit.SPTree(filename=fpath)

            interpolated_tts["stored:" + str(phase_def.id)] = sptree

        arrivals = num.zeros(distances.shape)

        def interpolate(phase_id):
            return interpolated_tts[phase_id].interpolate_many

        for i_r, r in enumerate(receivers):
            r_depths = num.zeros(distances.shape[0]) + r.z
            coords = num.zeros((distances.shape[0], 3))
            coords[:, 0] = r_depths
            coords[:, 1] = s_depths
            coords[:, 2] = distances[:, i_r]
            arr = self.timing.evaluate(interpolate, coords)
            arrivals[:, i_r] = arr

        return arrivals * self.factor + self.offset
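The build-or-load idiom around spit.SPTree used above reduces to a few lines. A sketch with a toy two-dimensional function; file name and tolerances are made up:

import os.path as op
import numpy as num
from pyrocko import spit, util

def toy_traveltime(x):  # x = (depth [m], distance [m]); toy stand-in
    return x[1] / 5000. + x[0] * 1e-5

fpath = 'cache/toy.sptree'  # hypothetical cache file
if not op.exists(fpath):
    tree = spit.SPTree(f=toy_traveltime, ftol=0.01,
                       xbounds=num.array([[0., 30000.], [0., 100000.]]),
                       xtols=num.array([1000., 1000.]))
    util.ensuredirs(fpath)
    tree.dump(filename=fpath)
else:
    tree = spit.SPTree(filename=fpath)

times = tree.interpolate_many(num.array([[10000., 50000.]]))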
Example #35
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option(
        "--force",
        dest="force",
        action="store_true",
        default=False,
        help="allow recreation of output <directory>",
    )

    parser.add_option(
        "--debug",
        dest="debug",
        action="store_true",
        default=False,
        help="print debugging information to stderr",
    )

    parser.add_option(
        "--dry-run",
        dest="dry_run",
        action="store_true",
        default=False,
        help="show available stations/channels and exit "
        "(do not download waveforms)",
    )

    parser.add_option(
        "--continue",
        dest="continue_",
        action="store_true",
        default=False,
        help="continue download after a accident",
    )

    parser.add_option(
        "--local-data",
        dest="local_data",
        action="append",
        help="add file/directory with local data",
    )

    parser.add_option(
        "--local-stations",
        dest="local_stations",
        action="append",
        help="add local stations file",
    )

    parser.add_option(
        "--local-responses-resp",
        dest="local_responses_resp",
        action="append",
        help="add file/directory with local responses in RESP format",
    )

    parser.add_option(
        "--local-responses-pz",
        dest="local_responses_pz",
        action="append",
        help="add file/directory with local pole-zero responses",
    )

    parser.add_option(
        "--local-responses-stationxml",
        dest="local_responses_stationxml",
        help="add file with local response information in StationXML format",
    )

    parser.add_option(
        "--window",
        dest="window",
        default="full",
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        "] (time format is YYYY-MM-DD HH:MM:SS)",
    )

    parser.add_option(
        "--out-components",
        choices=["enu", "rtu"],
        dest="out_components",
        default="rtu",
        help="set output component orientations to radial-transverse-up [rtu] "
        "(default) or east-north-up [enu]",
    )

    parser.add_option(
        "--padding-factor",
        type=float,
        default=3.0,
        dest="padding_factor",
        help="extend time window on either side, in multiples of 1/<fmin_hz> "
        "(default: 5)",
    )

    parser.add_option(
        "--credentials",
        dest="user_credentials",
        action="append",
        default=[],
        metavar="SITE,USER,PASSWD",
        help="user credentials for specific site to access restricted data "
        "(this option can be repeated)",
    )

    parser.add_option(
        "--token",
        dest="auth_tokens",
        metavar="SITE,FILENAME",
        action="append",
        default=[],
        help="user authentication token for specific site to access "
        "restricted data (this option can be repeated)",
    )

    parser.add_option(
        "--sites",
        dest="sites",
        metavar="SITE1,SITE2,...",
        #    default='bgr',
        default="http://ws.gpi.kit.edu,bgr,http://188.246.25.142:8080",
        help='sites to query (available: %s, default: "%%default")' %
        ", ".join(g_sites_available),
    )

    parser.add_option(
        "--band-codes",
        dest="priority_band_code",
        metavar="V,L,M,B,H,S,E,...",
        default="V,L,M,B,H,E",
        help="select and prioritize band codes (default: %default)",
    )

    parser.add_option(
        "--instrument-codes",
        dest="priority_instrument_code",
        metavar="H,L,G,...",
        default="H,L,O,",
        help="select and prioritize instrument codes (default: %default)",
    )

    parser.add_option(
        "--radius-min",
        dest="radius_min",
        metavar="VALUE",
        default=0.0,
        type=float,
        help="minimum radius [km]",
    )

    parser.add_option(
        "--tinc",
        dest="tinc",
        metavar="VALUE",
        default=3600.0 * 12.0,
        type=float,
        help="length of seperate saved files in s",
    )

    parser.add_option(
        "--nstations-wanted",
        dest="nstations_wanted",
        metavar="N",
        type=int,
        help="number of stations to select initially",
    )

    (options, args) = parser.parse_args(sys.argv[1:])
    if len(args) not in (9, 6, 5):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, "debug")
    else:
        util.setup_logging(program_name, "info")

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical("cannot use local responses in PZ and RESP "
                        "format at the same time")
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (
            options.local_responses_pz,
            options.local_responses_resp,
            options.local_responses_stationxml,
    ):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical("can only handle local responses from either PZ or "
                        "RESP or StationXML. Cannot yet merge different "
                        "response formats.")
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical("--local-responses-resp can only be used "
                        "when --stations is also given.")
        sys.exit(1)

    try:
        ename = ""
        magnitude = None
        mt = None
        if len(args) == 9:
            time = util.str_to_time(args[0] + " " + args[1])
            lat = float(args[2])
            lon = float(args[3])
            depth = float(args[4]) * km
            iarg = 5

        elif len(args) == 6:
            if args[1].find(":") == -1:
                sname_or_date = None
                lat = float(args[0])
                lon = float(args[1])
                event = None
                time = None
            else:
                sname_or_date = args[0] + " " + args[1]

            iarg = 2

        elif len(args) == 5:
            sname_or_date = args[0]
            iarg = 1

        if len(args) in (6, 5) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical("no event found")
                sys.exit(1)
            elif len(events) > 1:
                logger.critical("more than one event found")
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        event_dir = op.join("data", "events", eventname)
        output_dir = op.join(event_dir, "waveforms")
    except Exception:
        raise

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(
            time=time,
            lat=lat,
            lon=lon,
            depth=depth,
            name=ename,
            magnitude=magnitude,
            moment_tensor=mt,
        )

    if options.window == "full":
        if event is None:
            logger.critical("need event for --window=full")
            sys.exit(1)

        low_velocity = 1500.0
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == "p":
        if event is None:
            logger.critical("need event for --window=p")
            sys.exit(1)

        phases = list(map(cake.PhaseDef, "P p".split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error("required phase arrival not found")
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(",")
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tlen = tmax - tmin
    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    priority_band_code = options.priority_band_code.split(",")
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical("invalid band code: %s" % s)

    priority_instrument_code = options.priority_instrument_code.split(",")
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical("invalid instrument code: %s" % s)

    station_query_conf = dict(
        latitude=lat,
        longitude=lon,
        minradius=options.radius_min * km * cake.m2d,
        maxradius=radius * cake.m2d,
        channel=",".join("?%s?" % s for s in priority_band_code),
    )

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ["M/S", "M", "M/S**2"]

    output_units = "M"

    sites = [x.strip() for x in options.sites.split(",") if x.strip()]
    tinc = options.tinc
    # for site in sites:
    #     if site not in g_sites_available:
    #         logger.critical('unknown FDSN site: %s' % site)
    #         sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(",")
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(",")
            with open(token_filename, "r") as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical("cannot get token from file: %s" % token_filename)
            sys.exit(1)

    fn_template0 = (
        "data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed")

    fn_template_raw = op.join(output_dir, "raw", fn_template0)
    fn_template_raw_folder = op.join(output_dir, "raw/", "traces.mseed")
    fn_stations_raw = op.join(output_dir, "stations.raw.txt")
    fn_template_rest = op.join(output_dir, "rest", fn_template0)
    fn_commandline = op.join(output_dir, "seigerdown.command")

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                "iris": dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == "geonet":
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(
                    startbefore=tmax,
                    endafter=tmin,
                    includerestricted=(site in g_user_credentials
                                       or site in g_auth_tokens),
                )

            logger.info("downloading channel information (%s)" % site)
            sx = fdsn.station(site=site,
                              format="text",
                              level="channel",
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error("No stations matching given criteria. (%s)" % site)
            sx = None

        if sx is not None:
            sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}
    for sx, site in zip(sxs, sites):
        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()
            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                nsl_to_station[nsl] = s  # using first site with this station
    logger.info("number of stations found: %i" % len(nsl_to_station))

    # station weeding

    nsls_selected = None
    if options.nstations_wanted:
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info("number of stations selected: %i" % len(nsls_selected))

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info("time window %i/%i (%s - %s)" %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win),
            )

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon

                    dist = orthodrome.distance_accurate50m_numpy(
                        lat_, lon_, channel.latitude.value,
                        channel.longitude.value)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)

                    if channel.sample_rate:
                        deltat = 1.0 / channel.sample_rate.value
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        selection.append(nslc + (tmin_req - deltat * 10.0,
                                                 tmax_req + deltat * 10.0))

            if options.dry_run:
                # use throwaway names so the outer tmin/tmax stay intact
                for (net, sta, loc, cha, tmin_, tmax_) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]

                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ""
                        if nbatches > 1:
                            sbatch = " (batch %i/%i)" % (
                                (i // neach) + 1, nbatches)

                        logger.info("downloading data (%s)%s" % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            if tr.station == "7869":
                                tr.station = "MOER"
                                tr.network = "LE"
                                tr.location = ""
                            try:
                                tr.chop(tmin_win, tmax_win)
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        io.save(trs, fn_template_raw_folder)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn("overwriting file %s", fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            "an error occurred while downloading data "
                            "for channels \n  %s" %
                            "\n  ".join(".".join(x[:4])
                                        for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return "" if x == 1 else "s"

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info("selected: %s.%s.%s.%s from site%s %s" %
                        (nslc + (plural_s(len(sites)), "+".join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info("selected (%s): %i channel%s (%i station%s)" % (
                "+".join(sites),
                nchannels,
                plural_s(nchannels),
                nstations,
                plural_s(nstations),
            ))

        logger.info("selected total: %i channel%s (%i station%s)" % (
            nchannels_all,
            plural_s(nchannels_all),
            nstations_all,
            plural_s(nstations_all),
        ))

        logger.info("dry run done.")
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info("downloading response information (%s)" % site)
            sxs[site] = fdsn.station(site=site,
                                     level="response",
                                     selection=selection)
            sited = site
            if site == "http://192.168.11.220:8080":
                sited = "bgr_internal"
            elif site == "http://ws.gpi.kit.edu":
                sited = "kit"
            elif site == "http://188.246.25.142:8080":
                sited = "moer"

            sxs[site].dump_xml(filename=op.join(output_dir, "stations.%s.xml" %
                                                sited))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site["local"] = set()
        plocal = pile.make_pile(options.local_data, fileformat="detect")
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.station == "7869":
                    tr.station = "MOER"
                    tr.network = "LE"
                    tr.location = ""
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site["local"].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append("local")

    if options.local_responses_pz:
        sxs["local"] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs["local"] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs["local"] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error("no data available")
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.0  # fixed output window length [s]
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            if tr.station == "7869":
                tr.station = "MOER"
                tr.network = "LE"
                tr.location = ""
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=output_units,
                    )

                    break

                except stationxml.NoResponseInformation:
                    failure.append("%s: no response information" % site)

                except stationxml.MultipleResponseInformation:
                    failure.append("%s: multiple response information" % site)

            if response is None:
                failure = ", ".join(failure)

            else:
                failure = ""
                try:
                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = "trace too short"

            if failure:
                logger.warn("failed to restitute trace %s.%s.%s.%s (%s)" %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap="crossfade_cos")

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    if tr.station == "7869":
                        tr.station = "MOER"
                        tr.network = "LE"
                        tr.location = ""
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = "DISPL.%(network)s.%(station)s.%(location)s.%(channel)s"

    fn_waveforms = op.join(output_dir, "prepared", fn_template1)
    fn_stations = op.join(output_dir, "stations.prepared.txt")
    fn_event = op.join(event_dir, "event.txt")

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        keep = []
        for tr in traces:
            if deltat is not None:
                try:
                    tr.downsample_to(deltat, snap=True, allow_upsample_max=5)
                except util.UnavailableDecimation as e:
                    logger.warning("Cannot downsample %s.%s.%s.%s: %s" %
                                   (tr.nslc_id + (e, )))
                    continue

            keep.append(tr)

        traces = keep  # drop traces that could not be downsampled

        if options.out_components == "rtu":
            pios = s.guess_projections_to_rtu(out_channels=("R", "T", "Z"))
        elif options.out_components == "enu":
            pios = s.guess_projections_to_enu(out_channels=("E", "N", "Z"))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    logger.info("prepared waveforms from %i stations" % len(stations))
Example #37
def get_ttt(ct, coords, depth_opt):
    x_bounds = num.array(
        [
            [ct.r_depth_min, ct.r_depth_max],
            #[ct.s_depth_min, ct.s_depth_max],
            [depth_opt[0], depth_opt[1]],
            [ct.dist_min, ct.dist_max]
        ],
        dtype=float)

    # spatial tolerance for interpolation / accuracy
    x_tolerance = num.array((ct.r_acc, ct.s_acc, ct.dist_acc))

    # temporal tolerance for interpolation / accuracy
    t_tolerance = ct.t_acc  # s?

    earthmodel = CakeEarthmodel(id=ct.earthmodel_id,
                                earthmodel_1d=cake.load_model(
                                    cake.builtin_model_filename(
                                        ct.earthmodel_id)))

    interpolated_tts = {}

    for phase_def in ct.tabulated_phases:

        ttt_hash_ = ttt_hash(ct, earthmodel, phase_def.phases, x_bounds,
                             x_tolerance, t_tolerance)

        fpath = ttt_path(ct, ttt_hash_)

        # make ttt for current phase, bounds, tolerance settings
        # or read if already existing!
        if not op.exists(fpath):

            def evaluate(args):
                receiver_depth, source_depth, x = args
                t = []
                rays = earthmodel.earthmodel_1d.arrivals(
                    phases=phase_def.phases,
                    distances=[x * cake.m2d],
                    zstart=source_depth,
                    zstop=receiver_depth)

                for ray in rays:
                    t.append(ray.t)

                if t:
                    return min(t)
                else:
                    return None

            sptree = spit.SPTree(f=evaluate,
                                 ftol=t_tolerance,
                                 xbounds=x_bounds,
                                 xtols=x_tolerance)

            util.ensuredirs(fpath)
            sptree.dump(filename=fpath)
        else:
            sptree = spit.SPTree(filename=fpath)

        interpolated_tts["stored:" + str(phase_def.id)] = sptree
        intp_times = sptree.interpolate_many(coords)

        return intp_times
Example #38
    def dump_problem_info(self, dirname):
        fn = op.join(dirname, 'problem.yaml')
        util.ensuredirs(fn)
        guts.dump(self, filename=fn)
Example #39
File: io.py Project: gladkovvalery/pyrocko
def save(traces, filename_template, format='mseed', additional={},
         stations=None, overwrite=True):
    '''Save traces to file(s).

    :param traces: a trace or an iterable of traces to store
    :param filename_template: filename template with placeholders for trace
            metadata. Uses normal python '%%(placeholder)s' string templates.
            The following placeholders are considered: ``network``,
            ``station``, ``location``, ``channel``, ``tmin``
            (time of first sample), ``tmax`` (time of last sample),
            ``tmin_ms``, ``tmax_ms``, ``tmin_us``, ``tmax_us``. The versions
            with '_ms' include milliseconds, the versions with '_us' include
            microseconds.
    :param format: %s
    :param additional: dict with custom template placeholder fillins.
    :param overwrite: if ``False``, raise an exception if file exists
    :returns: list of generated filenames

    .. note::
        Network, station, location, and channel codes may be silently
        truncated to file-format-specific maximum lengths.
    '''

    if isinstance(traces, trace.Trace):
        traces = [traces]

    if format == 'from_extension':
        format = os.path.splitext(filename_template)[1][1:]

    if format == 'mseed':
        return mseed.save(traces, filename_template, additional,
                          overwrite=overwrite)

    elif format == 'gse2':
        return gse2_io_wrap.save(traces, filename_template, additional,
                                 overwrite=overwrite)

    elif format == 'sac':
        fns = []
        for tr in traces:
            fn = tr.fill_template(filename_template, **additional)
            if not overwrite and os.path.exists(fn):
                raise FileSaveError('file exists: %s' % fn)

            util.ensuredirs(fn)

            f = sac.SacFile(from_trace=tr)
            if stations:
                s = stations[tr.network, tr.station, tr.location]
                f.stla = s.lat
                f.stlo = s.lon
                f.stel = s.elevation
                f.stdp = s.depth
                f.cmpinc = s.get_channel(tr.channel).dip + 90.
                f.cmpaz = s.get_channel(tr.channel).azimuth

            f.write(fn)
            fns.append(fn)
            
        return fns
   
    elif format == 'text':
        fns = []
        for tr in traces:
            fn = tr.fill_template(filename_template, **additional)
            if not overwrite and os.path.exists(fn):
                raise FileSaveError('file exists: %s' % fn)

            util.ensuredirs(fn)
            x, y = tr.get_xdata(), tr.get_ydata()
            num.savetxt(fn, num.transpose((x, y)))
            fns.append(fn)
        return fns
            
    elif format == 'yaff':
        return yaff.save(traces, filename_template, additional, 
                         overwrite=overwrite)
    else:
        raise UnsupportedFormat(format)
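A minimal usage sketch of the filename template mechanism, with a synthetic trace and a hypothetical output directory:

import numpy as num
from pyrocko import trace, io

tr = trace.Trace(network='XX', station='TEST', location='', channel='BHZ',
                 tmin=0.0, deltat=0.01,
                 ydata=num.zeros(1000, dtype=num.int32))

fns = io.save([tr], 'out/%(network)s.%(station)s.%(channel)s_%(tmin)s.mseed')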
Example #40
def put_test_data(path, dest=None):
    if dest is None:
        dest = path
    fn = get_test_data(path)
    util.ensuredirs(dest)
    shutil.copytree(fn, dest)
Example #41
    def make_ttt(self, force=False):
        '''Compute travel time tables.

        Travel time tables are computed using the 1D earth model defined in
        :py:attr:`pyrocko.gf.meta.Config.earthmodel_1d` for each defined phase
        in :py:attr:`pyrocko.gf.meta.Config.tabulated_phases`. The accuracy of
        the tabulated times is adjusted to the sampling rate of the store.
        '''

        from pyrocko import cake
        config = self.config

        if not config.tabulated_phases:
            return

        mod = config.earthmodel_1d
        if not mod:
            raise StoreError('no earth model found')

        for pdef in config.tabulated_phases:

            phase_id = pdef.id
            phases = pdef.phases
            horvels = pdef.horizontal_velocities

            fn = os.path.join(self.store_dir, 'phases', '%s.phase' % phase_id)

            if os.path.exists(fn) and not force:
                logger.info('file already exists: %s' % fn)
                continue

            def evaluate(args):

                if len(args) == 2:
                    zr, zs, x = (config.receiver_depth,) + args
                elif len(args) == 3:
                    zr, zs, x = args
                else:
                    assert False

                t = []
                if phases:
                    rays = mod.arrivals(
                        phases=phases,
                        distances=[x*cake.m2d],
                        zstart=zs,
                        zstop=zr)

                    for ray in rays:
                        t.append(ray.t)

                for v in horvels:
                    t.append(x/(v*1000.))

                if t:
                    return min(t)
                else:
                    return None

            logger.info('making travel time table for phasegroup "%s"' %
                        phase_id)

            ip = spit.SPTree(
                f=evaluate,
                ftol=config.deltat*0.5,
                xbounds=num.transpose((config.mins, config.maxs)),
                xtols=config.deltas)

            util.ensuredirs(fn)
            ip.dump(fn)
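The resulting '<phase_id>.phase' files under <store_dir>/phases/ are plain SPTree dumps and can be interpolated directly. A hedged sketch; the store path is hypothetical and the coordinate layout (with or without receiver depth) depends on the store configuration:

import numpy as num
from pyrocko import spit

ip = spit.SPTree(filename='my_gfstore/phases/P.phase')  # hypothetical store
t = ip.interpolate_many(num.array([[10000., 100000.]]))  # travel times [s]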
Example #42
    def dump_collection(self):
        path = self.path_collection()
        util.ensuredirs(path)
        guts.dump(self._collection, filename=path)