Example #1
def command_init(args):

    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')

    parser, options, args = cl_parse('init', args, setup=setup)

    if len(args) == 0:
        parser.print_help()
        sys.exit(1)

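    # Defaults: lat 52 deg, lon 5.4 deg, radius 90 km; overridden by args.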
    scenario_dims = [52, 5.4, 90.]
    scenario_dims[:len(args[1:])] = map(float, args[1:])

    project_dir = args[0]
    logger.info('Initialising new scenario %s at %.2f, %.2f with radius %d km'
                % tuple([args[0]] + scenario_dims))

    scenario_dims[2] *= km
    scenario.ScenarioGenerator.initialize(
        project_dir, *scenario_dims, force=options.force)

    gf_stores_path = op.join(project_dir, 'gf_stores')
    util.ensuredir(gf_stores_path)
Example #2
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None):

        self.store = gf.store.Store(store_dir, "w")

        if block_size is None:
            block_size = (1, 1, 2000)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size)

        baseconf = self.store.get_extra("ahfullgreen")

        conf = AhfullgreenConfigFull(**baseconf.items())
        conf.earthmodel_1d = self.store.config.earthmodel_1d
        deltat = 1.0 / self.store.config.sample_rate
        conf.deltat = deltat

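        # The minimum time window is computed once and cached in the shared dict.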
        if "time_window_min" not in shared:
            d = self.store.make_timing_params(
                conf.time_region[0], conf.time_region[1])

            shared["time_window_min"] = d["tlenmax_vred"]

        time_window_min = shared["time_window_min"]

        conf.nsamples = nextpow2(int(round(time_window_min / deltat)) + 1)

        self.ahfullgreen_config = conf

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)
Example #3
    def create_editables(store_dir, config, force=False, extra=None):
        try:
            util.ensuredir(store_dir)
        except OSError:
            raise CannotCreate('cannot create directory %s' % store_dir)

        fns = []

        config_fn = os.path.join(store_dir, 'config')
        remove_if_exists(config_fn, force)
        meta.dump(config, filename=config_fn)

        fns.append(config_fn)

        for sub_dir in ['extra']:
            dpath = os.path.join(store_dir, sub_dir)
            remake_dir(dpath, force)

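        # Each entry of the optional 'extra' mapping gets its own file under extra/.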
        if extra:
            for k, v in extra.items():
                check_string_id(k)
                fn = os.path.join(store_dir, 'extra', k)
                remove_if_exists(fn, force)
                meta.dump(v, filename=fn)

                fns.append(fn)

        return fns
Example #4
File: insar.py Project: hvasbath/pyrocko
    def dump_data(self, engine, sources, path,
                  tmin=None, tmax=None, overwrite=False):
        path_insar = op.join(path, 'insar')
        util.ensuredir(path_insar)

        tmin, tmax = self.get_time_range(sources)
        tts = util.time_to_str

        fn_tpl = op.join(path_insar, 'insar-scene-{track_direction}_%s_%s'
                         % (tts(tmin), tts(tmax)))

        def scene_fn(track):
            return fn_tpl.format(track_direction=track.lower())

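        # One scene file per track; existing files are kept unless overwrite is set.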
        for track in ('ascending', 'descending'):
            fn = '%s.yml' % scene_fn(track)

            if op.exists(fn) and not overwrite:
                logger.debug('Scene exists: %s' % fn)
                continue

            scenes = self.get_insar_scenes(engine, sources, tmin, tmax)
            for sc in scenes:
                fn = scene_fn(sc.config.meta.orbit_direction)
                logger.debug('Writing %s' % fn)
                sc.save('%s.npz' % fn)

        return [path_insar]

    def dump_responses(self, path):
        from pyrocko.io import stationxml

        logger.debug('Writing out StationXML...')

        path_responses = op.join(path, 'meta')
        util.ensuredir(path_responses)
        fn_stationxml = op.join(path_responses, 'stations.xml')

        stations = self.station_generator.get_stations()
        sxml = stationxml.FDSNStationXML.from_pyrocko_stations(stations)

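        # Map the configured seismogram quantity to its FDSN unit string.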
        sunit = {
            'displacement': 'M',
            'velocity': 'M/S',
            'acceleration': 'M/S**2',
            'counts': 'COUNTS'}[self.seismogram_quantity]

        response = stationxml.Response(
            instrument_sensitivity=stationxml.Sensitivity(
                value=1.,
                frequency=1.,
                input_units=stationxml.Units(sunit),
                output_units=stationxml.Units('COUNTS')),
            stage_list=[])

        for net, station, channel in sxml.iter_network_station_channels():
            channel.response = response

        sxml.dump_xml(filename=fn_stationxml)

        return [path_responses]
Example #6
    def initialize(
            cls, path,
            center_lat=None, center_lon=None, radius=None,
            targets=AVAILABLE_TARGETS,
            force=False):
        import os.path as op

        if op.exists(path) and not force:
            raise CannotCreate('Directory %s already exists! May use force?'
                               % path)

        util.ensuredir(path)
        fn = op.join(path, 'scenario.yml')
        logger.debug('Writing new scenario to %s' % fn)

        scenario = cls()
        scenario.target_generators.extend([t() for t in targets])

        for gen in scenario.target_generators:
            gen.update_hierarchy(scenario)

        scenario.center_lat = center_lat
        scenario.center_lon = center_lon
        scenario.radius = radius

        scenario.dump(filename=fn)

        return scenario
Example #7
File: qssp.py Project: emolch/pyrocko
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None,
                 force=False):
        self.gfmapping = [
            (MomentTensor(m=symmat6(1, 0, 0, 1, 0, 0)),
             {'un': (0, -1), 'ue': (3, -1), 'uz': (5, -1)}),
            (MomentTensor(m=symmat6(0, 0, 0, 0, 1, 1)),
             {'un': (1, -1), 'ue': (4, -1), 'uz': (6, -1)}),
            (MomentTensor(m=symmat6(0, 0, 1, 0, 0, 0)),
             {'un': (2, -1), 'uz': (7, -1)}),
            (MomentTensor(m=symmat6(0, 1, 0, 0, 0, 0)),
             {'un': (8, -1), 'uz': (9, -1)}),
        ]

        self.store = gf.store.Store(store_dir, 'w')

        if step == 0:
            block_size = (1, 1, self.store.config.ndistances)
        else:
            if block_size is None:
                block_size = (1, 1, 51)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size, force=force)

        baseconf = self.store.get_extra('qssp')

        conf = QSSPConfigFull(**baseconf.items())
        conf.gf_directory = pjoin(store_dir, 'qssp_green')
        conf.earthmodel_1d = self.store.config.earthmodel_1d
        deltat = self.store.config.deltat

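        # The modelling time window is derived once and cached in the shared dict.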
        if 'time_window' not in shared:
            d = self.store.make_timing_params(
                conf.time_region[0], conf.time_region[1],
                force=force)

            tmax = math.ceil(d['tmax'] / deltat) * deltat
            tmin = math.floor(d['tmin'] / deltat) * deltat

            shared['time_window'] = tmax - tmin
            shared['tstart'] = tmin

        self.tstart = shared['tstart']
        conf.time_window = shared['time_window']

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)

        util.ensuredir(conf.gf_directory)

        self.qssp_config = conf
Example #8
File: pile.py Project: gomes310/pyrocko
    def __init__(self, cachedir):
        '''Create new cache.

        :param cachedir: directory to hold the cache files.
        '''

        self.cachedir = cachedir
        self.dircaches = {}
        self.modified = set()
        util.ensuredir(self.cachedir)
Example #9
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None):

        if block_size is None:
            block_size = (51, 1, 51)

        self.store = gf.store.Store(store_dir, 'w')

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size)

        self.poel_config = self.store.get_extra('poel')

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)

    def get_insar_scenes(self):
        from kite import Scene
        if self._scenes is None:
            self._scenes = []
            path_insar = self.get_path('insar')
            util.ensuredir(path_insar)

            fns = util.select_files([path_insar], regex=r'\.(npz)$',
                                    show_progress=False)
            for f in fns:
                self._scenes.append(Scene.load(f))

        return self._scenes
Example #11
    def __init__(self,
                 store_dir,
                 step,
                 shared,
                 block_size=None,
                 tmp=None,
                 force=False):

        self.store = gf.store.Store(store_dir, 'w')

        if block_size is None:
            block_size = (1, 1, 100)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(self,
                                    self.store.config,
                                    step,
                                    block_size=block_size,
                                    force=force)

        baseconf = self.store.get_extra('qseis')

        conf = QSeisConfigFull(**baseconf.items())
        conf.earthmodel_1d = self.store.config.earthmodel_1d
        conf.earthmodel_receiver_1d = self.store.config.earthmodel_receiver_1d

        deltat = 1.0 / self.gf_config.sample_rate

        if 'time_window_min' not in shared:
            d = self.store.make_timing_params(conf.time_region[0],
                                              conf.time_region[1])

            shared['time_window_min'] = d['tlenmax_vred']
            shared['time_start'] = d['tmin_vred']
            shared['time_reduction_velocity'] = d['vred'] / km

        time_window_min = shared['time_window_min']
        conf.time_start = shared['time_start']

        conf.time_reduction_velocity = shared['time_reduction_velocity']

        conf.nsamples = nextpow2(int(round(time_window_min / deltat)) + 1)
        conf.time_window = (conf.nsamples - 1) * deltat

        self.qseis_config = conf

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)
Example #12
def command_init(args):
    def setup(parser):
        parser.add_option('--force',
                          dest='force',
                          action='store_true',
                          help='overwrite existing files')
        parser.add_option('--location',
                          dest='location',
                          metavar='LAT,LON',
                          help='set scenario center location [deg]')
        parser.add_option('--radius',
                          dest='radius',
                          metavar='RADIUS',
                          type=float,
                          help='set scenario radius [km]')

    parser, options, args = cl_parse('init', args, setup=setup)

    if len(args) != 1:
        parser.print_help()
        sys.exit(1)

    if options.location:
        try:
            lat, lon = map(float, options.location.split(','))
        except Exception:
            die('expected --location=LAT,LON')
    else:
        lat = lon = None

    if options.radius is not None:
        radius = options.radius * km
    else:
        radius = None

    project_dir = args[0]
    try:
        scenario.ScenarioGenerator.initialize(project_dir,
                                              lat,
                                              lon,
                                              radius,
                                              force=options.force)

        gf_stores_path = op.join(project_dir, 'gf_stores')
        util.ensuredir(gf_stores_path)

    except scenario.CannotCreatePath as e:
        die(str(e) + ' Use --force to override.')

    except scenario.ScenarioError as e:
        die(str(e))
Example #13
    def get_insar_scenes(self):
        from kite import Scene
        if self._scenes is None:
            self._scenes = []
            path_insar = self.get_path('insar')
            util.ensuredir(path_insar)

            fns = util.select_files([path_insar],
                                    regex='\\.(npz)$',
                                    show_progress=False)
            for f in fns:
                self._scenes.append(Scene.load(f))

        return self._scenes
Example #14
    def dump_waveforms(self, engine, sources, path,
                       tmin=None, tmax=None, overwrite=False):
        path_waveforms = op.join(path, 'waveforms')
        util.ensuredir(path_waveforms)

        path_traces = op.join(
            path_waveforms,
            '%(wmin_year)s',
            '%(wmin_month)s',
            '%(wmin_day)s',
            'waveform_%(network)s_%(station)s_'
            + '%(location)s_%(channel)s_%(tmin)s_%(tmax)s.mseed')

        tmin_all, tmax_all = self.get_time_range(sources)
        tmin = tmin if tmin is not None else tmin_all
        tmax = tmax if tmax is not None else tmax_all
        tts = util.time_to_str

        tinc = self.get_useful_time_increment(engine, sources)
        tmin = math.floor(tmin / tinc) * tinc
        tmax = math.ceil(tmax / tinc) * tinc

        nwin = int(round((tmax - tmin) / tinc))

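        # Write the waveforms window by window, clipped to [tmin, tmax].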
        for iwin in range(nwin):
            tmin_win = max(tmin, tmin + iwin*tinc)
            tmax_win = min(tmax, tmin + (iwin+1)*tinc)

            if tmax_win <= tmin_win:
                continue

            trs = self.get_waveforms(engine, sources, tmin_win, tmax_win)

            try:
                io.save(
                    trs, path_traces,
                    additional=dict(
                        wmin_year=tts(tmin_win, format='%Y'),
                        wmin_month=tts(tmin_win, format='%m'),
                        wmin_day=tts(tmin_win, format='%d'),
                        wmin=tts(tmin_win, format='%Y-%m-%d_%H-%M-%S'),
                        wmax_year=tts(tmax_win, format='%Y'),
                        wmax_month=tts(tmax_win, format='%m'),
                        wmax_day=tts(tmax_win, format='%d'),
                        wmax=tts(tmax_win, format='%Y-%m-%d_%H-%M-%S')),
                    overwrite=overwrite)
            except FileSaveError as e:
                logger.debug('Waveform exists %s' % e)

        return [path_waveforms]
Example #15
    def ensure_gfstores(self, interactive=False):
        if not self.stores_missing:
            return

        from pyrocko.gf import ws

        cfg = config.config()

        engine = self.get_engine()

        gf_store_superdirs = engine.store_superdirs

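        # Let the user pick a download directory; default is the first superdir.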
        if interactive:
            print('We could not find the following Green\'s function stores:\n'
                  '%s\n'
                  'We can try to download the stores from '
                  'http://kinherd.org into one of the following '
                  'directories.'
                  % '\n'.join('  ' + s for s in self.stores_missing))
            for idr, dr in enumerate(gf_store_superdirs):
                print(' %d. %s' % ((idr+1), dr))
            s = input('\nInto which directory should we download the GF '
                      'store(s)?\nDefault 1, (C)ancel: ')
            if s in ['c', 'C']:
                print('Canceled!')
                sys.exit(1)
            elif s == '':
                s = 1
            try:
                s = int(s)
                if not (0 < s <= len(gf_store_superdirs)):
                    raise ValueError
            except ValueError:
                print('Invalid selection: %s' % s)
                sys.exit(1)
        else:
            s = 1

        download_dir = gf_store_superdirs[s-1]
        util.ensuredir(download_dir)
        logger.info('Downloading Green\'s functions stores to %s'
                    % download_dir)

        oldwd = os.getcwd()
        for store in self.stores_missing:
            os.chdir(download_dir)
            ws.download_gf_store(site='kinherd', store_id=store)

        os.chdir(oldwd)
Example #16
File: scenario.py Project: wsja/pyrocko
    def ensure_gfstores(self, interactive=False):
        if not self.stores_missing:
            return

        from pyrocko.gf import ws

        cfg = config.config()
        if len(cfg.gf_store_superdirs) == 0:
            store_dir = op.expanduser(
                op.join(config.pyrocko_dir_tmpl, 'gf_stores'))
            logger.debug('Creating default gf_store_superdirs: %s' % store_dir)

            util.ensuredir(store_dir)
            cfg.gf_store_superdirs = [store_dir]
            config.write_config(cfg)

        if interactive:
            print('We could not find the following Green\'s function stores:\n'
                  ' %s\n'
                  'We can try to download the stores from '
                  ' http://kinherd.org:8080 into Pyrocko\'s global GF cache.'
                  % '\n'.join(self.stores_missing))
            for idr, dr in enumerate(cfg.gf_store_superdirs):
                print(' %d. %s' % ((idr+1), dr))
            s = input('\nIn which cache directory shall the GF store'
                      ' be downloaded to?\n'
                      'Default 1, (C)ancel: ')
            if s in ['c', 'C']:
                print('Canceled!')
                sys.exit(1)
            elif s == '':
                s = 1
            try:
                s = int(s)
                if not (0 < s <= len(cfg.gf_store_superdirs)):
                    raise ValueError
            except ValueError:
                print('Invalid selection: %s' % s)
                sys.exit(1)
        else:
            s = 1

        download_dir = cfg.gf_store_superdirs[s-1]
        logger.info('Downloading Green\'s functions stores to %s'
                    % download_dir)

        for store in self.stores_missing:
            os.chdir(download_dir)
            ws.download_gf_store(site='kinherd', store_id=store)
Example #17
    def ensure_gfstores(self, interactive=False):
        if not self.stores_missing:
            return

        from pyrocko.gf import ws

        cfg = config.config()

        engine = self.get_engine()

        gf_store_superdirs = engine.store_superdirs

        if interactive:
            print('We could not find the following Green\'s function stores:\n'
                  '%s\n'
                  'We can try to download the stores from '
                  'http://kinherd.org into one of the following '
                  'directories.' % '\n'.join('  ' + s
                                             for s in self.stores_missing))
            for idr, dr in enumerate(gf_store_superdirs):
                print(' %d. %s' % ((idr + 1), dr))
            s = input('\nInto which directory should we download the GF '
                      'store(s)?\nDefault 1, (C)ancel: ')
            if s in ['c', 'C']:
                print('Canceled!')
                sys.exit(1)
            elif s == '':
                s = 1
            try:
                s = int(s)
                if not (0 < s <= len(gf_store_superdirs)):
                    raise ValueError
            except ValueError:
                print('Invalid selection: %s' % s)
                sys.exit(1)
        else:
            s = 1

        download_dir = gf_store_superdirs[s - 1]
        util.ensuredir(download_dir)
        logger.info('Downloading Green\'s functions stores to %s' %
                    download_dir)

        oldwd = os.getcwd()
        for store in self.stores_missing:
            os.chdir(download_dir)
            ws.download_gf_store(site='kinherd', store_id=store)

        os.chdir(oldwd)
Example #18
    def initialize(cls,
                   path,
                   center_lat=None,
                   center_lon=None,
                   radius=None,
                   targets=AVAILABLE_TARGETS,
                   force=False):
        """Initialize a Scenario and create a ``scenario.yml``

        :param path: Path to create the scenario in
        :type path: str
        :param center_lat: Center latitude, defaults to None
        :type center_lat: float, optional
        :param center_lon: Center longitude, defaults to None
        :type center_lon: float, optional
        :param radius: Scenario's radius in [m], defaults to None
        :type radius: float, optional
        :param targets: Targets to throw into scenario,
            defaults to AVAILABLE_TARGETS
        :type targets: list of :class:`pyrocko.scenario.ScenarioTargets`,
            optional
        :param force: Overwrite directory, defaults to False
        :type force: bool, optional
        :returns: Scenario
        :rtype: :class:`pyrocko.scenario.ScenarioGenerator`
        """
        import os.path as op

        if op.exists(path) and not force:
            raise CannotCreate('Directory %s already exists! May use force?' %
                               path)

        util.ensuredir(path)
        fn = op.join(path, 'scenario.yml')
        logger.debug('Writing new scenario to %s' % fn)

        scenario = cls()
        scenario.target_generators.extend([t() for t in targets])

        for gen in scenario.target_generators:
            gen.update_hierarchy(scenario)

        scenario.center_lat = center_lat
        scenario.center_lon = center_lon
        scenario.radius = radius

        scenario.dump(filename=fn)

        return scenario

    def get_waveform_pile(self):
        self.ensure_data()

        if self._pile is None:
            path_waveforms = self.get_path('waveforms')
            util.ensuredir(path_waveforms)
            fns = util.select_files(
                [path_waveforms], show_progress=False)

            self._pile = pile.Pile()
            if fns:
                self._pile.load_files(
                    fns, fileformat='mseed', show_progress=False)

        return self._pile
Example #20
    def get_waveform_pile(self):
        self.ensure_data()

        if self._pile is None:
            path_waveforms = self.get_path('waveforms')
            util.ensuredir(path_waveforms)
            fns = util.select_files([path_waveforms], show_progress=False)

            self._pile = pile.Pile()
            if fns:
                self._pile.load_files(fns,
                                      fileformat='mseed',
                                      show_progress=False)

        return self._pile
Example #21
    def ensure_data(self, engine, sources, path, tmin=None, tmax=None):
        path_gnss = op.join(path, 'gnss')
        util.ensuredir(path_gnss)

        fn = op.join(path_gnss,
                     'campaign-%s.yml' % self.station_generator.network_name)

        if op.exists(fn):
            return

        campaigns = self.get_gnss_campaigns(engine, sources, tmin, tmax)

        with open(fn, 'w') as f:
            for camp in campaigns:
                camp.dump(stream=f)
Example #22
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None):

        if block_size is None:
            block_size = (51, 1, 51)

        self.store = gf.store.Store(store_dir, 'w')

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size)

        self.poel_config = self.store.get_extra('poel')

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)

    def dump_data(self, engine, sources, path,
                  tmin=None, tmax=None, overwrite=False):
        path_gnss = op.join(path, 'gnss')
        util.ensuredir(path_gnss)

        campaigns = self.get_gnss_campaign(
            engine, sources, tmin, tmax)

        fn = op.join(path_gnss,
                     'campaign-%s.yml' % self.station_generator.network_name)

        with open(fn, 'w') as f:
            for camp in campaigns:
                camp.dump(stream=f)

        return [fn]
Example #24
    def initialize(
            cls, path,
            center_lat=None, center_lon=None, radius=None,
            targets=AVAILABLE_TARGETS,
            force=False):
        """Initialize a Scenario and create a ``scenario.yml``

        :param path: Path to create the scenario in
        :type path: str
        :param center_lat: Center latitude, defaults to None
        :type center_lat: float, optional
        :param center_lon: Center longitude, defaults to None
        :type center_lon: float, optional
        :param radius: Scenario's radius in [m], defaults to None
        :type radius: float, optional
        :param targets: Targets to throw into scenario,
            defaults to AVAILABLE_TARGETS
        :type targets: list of :class:`pyrocko.scenario.ScenarioTargets`,
            optional
        :param force: Overwrite directory, defaults to False
        :type force: bool, optional
        :returns: Scenario
        :rtype: :class:`pyrocko.scenario.ScenarioGenerator`
        """
        import os.path as op

        if op.exists(path) and not force:
            raise CannotCreate('Directory %s already exists! May use force?'
                               % path)

        util.ensuredir(path)
        fn = op.join(path, 'scenario.yml')
        logger.debug('Writing new scenario to %s' % fn)

        scenario = cls()
        scenario.target_generators.extend([t() for t in targets])

        for gen in scenario.target_generators:
            gen.update_hierarchy(scenario)

        scenario.center_lat = center_lat
        scenario.center_lon = center_lon
        scenario.radius = radius

        scenario.dump(filename=fn)

        return scenario
Example #25
    def create_scenario(self,
                        force=False,
                        interactive=True,
                        gf_store_superdirs=None,
                        make_map=True):

        logger.info('Creating scenario...')

        scenario = self.get_scenario()
        self.create_project_dir(force)
        util.ensuredir(self.get_gf_stores_dir())

        data_dir = op.join(self.project_dir, self.data_dir)
        util.ensuredir(data_dir)

        scenario.dump(filename=op.join(data_dir, 'scenario.yml'))

        if gf_store_superdirs is None:
            engine1 = gf.LocalEngine(
                use_config=True, store_superdirs=[self.get_gf_stores_dir()])
        else:
            engine1 = gf.LocalEngine(use_config=False,
                                     store_superdirs=gf_store_superdirs)

        scenario.init_modelling(engine=engine1)

        scenario.ensure_gfstores(interactive=interactive)
        self.symlink_gfstores(engine1)

        engine2 = gf.LocalEngine(use_config=False,
                                 store_superdirs=[self.get_gf_stores_dir()])

        scenario.init_modelling(engine=engine2)

        scenario.dump_data(path=data_dir)
        if make_map:
            scenario.make_map(op.join(self.project_dir, 'scenario_map.pdf'))

        shutil.move(op.join(data_dir, 'sources.yml'),
                    op.join(data_dir, 'scenario_sources.yml'))

        markers = scenario.get_onsets()
        marker_path = op.join(data_dir, 'picks', 'picks.markers')
        if markers:
            util.ensuredirs(marker_path)
            pmarker.save_markers(markers, marker_path)
Example #26
    def setUpClass(cls):
        cls.cwd = os.getcwd()
        cls.run_dir = tutorial_run_dir()
        util.ensuredir(cls.run_dir)
        cls.dn = open(os.devnull, 'w')
        sys.stdout = cls.dn
        os.chdir(cls.run_dir)

        cls.snuffle_orig = snuffler.snuffle

        def snuffle_dummy(*args, **kwargs):
            pass

        snuffler.snuffle = snuffle_dummy

        cls.show_progress_force_off_orig = pile.show_progress_force_off
        pile.show_progress_force_off = True
Example #28
    def dump_data(self, path, tmin=None, tmax=None, overwrite=False):
        self.source_generator.dump_data(path)

        meta_dir = op.join(path, 'meta')
        util.ensuredir(meta_dir)

        model.station.dump_stations(
            self.get_stations(), op.join(meta_dir, 'stations.txt'))
        model.station.dump_kml(
            self.get_stations(), op.join(meta_dir, 'stations.kml'))

        dump_readme(path)

        def dump_data(gen, *a, **kw):
            logger.info('Creating files from %s...' % gen.__class__.__name__)
            return gen.dump_data(self._engine, self.get_sources(), *a, **kw)

        return dump_data
Example #29
    def setUpClass(cls):
        from matplotlib import pyplot as plt

        cls.cwd = os.getcwd()
        cls.run_dir = tutorial_run_dir()
        util.ensuredir(cls.run_dir)
        cls.dn = open(os.devnull, 'w')
        sys.stdout = cls.dn
        os.chdir(cls.run_dir)

        cls._mpl_show_orig = plt.show
        plt.show = noop

        cls._snuffle_orig = snuffler.snuffle
        snuffler.snuffle = noop

        cls._show_progress_force_off_orig = pile.show_progress_force_off
        pile.show_progress_force_off = True
Example #31
File: qseis.py Project: emolch/pyrocko
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None,
                 force=False):

        self.store = gf.store.Store(store_dir, 'w')

        if block_size is None:
            block_size = (1, 1, 100)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size, force=force)

        baseconf = self.store.get_extra('qseis')

        conf = QSeisConfigFull(**baseconf.items())
        conf.earthmodel_1d = self.store.config.earthmodel_1d
        conf.earthmodel_receiver_1d = self.store.config.earthmodel_receiver_1d

        deltat = 1.0/self.gf_config.sample_rate

        if 'time_window_min' not in shared:
            d = self.store.make_timing_params(
                conf.time_region[0], conf.time_region[1],
                force=force)

            shared['time_window_min'] = d['tlenmax_vred']
            shared['time_start'] = d['tmin_vred']
            shared['time_reduction_velocity'] = d['vred'] / km

        time_window_min = shared['time_window_min']
        conf.time_start = shared['time_start']

        conf.time_reduction_velocity = shared['time_reduction_velocity']

        conf.nsamples = nextpow2(int(round(time_window_min / deltat)) + 1)
        conf.time_window = (conf.nsamples-1)*deltat

        self.qseis_config = conf

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)
Example #32
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None):
        self.gfmapping = [
            (MomentTensor(m=symmat6(1, 0, 0, 1, 0, 0)),
             {'un': (0, -1), 'ue': (3, -1), 'uz': (5, -1)}),
            (MomentTensor(m=symmat6(0, 0, 0, 0, 1, 1)),
             {'un': (1, -1), 'ue': (4, -1), 'uz': (6, -1)}),
            (MomentTensor(m=symmat6(0, 0, 1, 0, 0, 0)),
             {'un': (2, -1), 'uz': (7, -1)}),
            (MomentTensor(m=symmat6(0, 1, 0, 0, 0, 0)),
             {'un': (8, -1), 'uz': (9, -1)}),
        ]

        self.store = gf.store.Store(store_dir, 'w')

        if step == 0:
            block_size = (1, 1, self.store.config.ndistances)
        else:
            if block_size is None:
                block_size = (1, 1, 51)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size)

        baseconf = self.store.get_extra('qssp')

        conf = QSSPConfigFull(**baseconf.items())
        conf.gf_directory = pjoin(store_dir, 'qssp_green')
        conf.earthmodel_1d = self.store.config.earthmodel_1d

        if 'time_window' not in shared:
            d = self.store.make_timing_params(
                conf.time_region[0], conf.time_region[1])
            shared['time_window'] = d['tmax']
            shared['tstart'] = d['tmin']

        deltat = self.store.config.deltat
        conf.time_window = shared['time_window']

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)

        util.ensuredir(conf.gf_directory)

        self.qssp_config = conf
Example #33
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None):
        self.gfmapping = [
            (MomentTensor(m=symmat6(1, 0, 0, 1, 0, 0)),
             {'r': (0, +1), 't': (3, +1), 'z': (5, +1)}),
            (MomentTensor(m=symmat6(0, 0, 0, 0, 1, 1)),
             {'r': (1, +1), 't': (4, +1), 'z': (6, +1)}),
            (MomentTensor(m=symmat6(0, 0, 1, 0, 0, 0)),
             {'r': (2, +1), 'z': (7, +1)}),
            (MomentTensor(m=symmat6(0, 1, 0, 0, 0, 0)),
             {'r': (8, +1), 'z': (9, +1)})]

        self.store = gf.store.Store(store_dir, 'w')

        if block_size is None:
            block_size = (1, 1, 100)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(
            self, self.store.config, step, block_size=block_size)
        baseconf = self.store.get_extra('qseis')

        conf = QSeisConfigFull(**baseconf.items())
        conf.earthmodel_1d = self.store.config.earthmodel_1d

        deltat = 1.0 / self.gf_config.sample_rate

        if 'time_window_min' not in shared:
            d = self.store.make_timing_params(
                conf.time_region[0], conf.time_region[1])
            shared['time_window_min'] = d['tlenmax_vred']
            shared['time_start'] = d['tmin_vred']
            shared['time_reduction_velocity'] = d['vred'] / km

        time_window_min = shared['time_window_min']
        conf.time_start = shared['time_start']

        conf.time_reduction_velocity = shared['time_reduction_velocity']

        conf.nsamples = nextpow2(int(round(time_window_min / deltat)) + 1)
        conf.time_window = (conf.nsamples-1)*deltat

        self.qseis_config = conf

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)
Example #34
    def create(store_dir, deltat, nrecords, force=False):

        try:
            util.ensuredir(store_dir)
        except OSError:
            raise CannotCreate('cannot create directory %s' % store_dir)

        index_fn = BaseStore.index_fn_(store_dir)
        data_fn = BaseStore.data_fn_(store_dir)

        for fn in (index_fn, data_fn):
            remove_if_exists(fn, force)

        with open(index_fn, 'wb') as f:
            f.write(struct.pack(gf_store_header_fmt, nrecords, deltat))
            records = num.zeros(nrecords, dtype=gf_record_dtype)
            records.tofile(f)

        with open(data_fn, 'wb') as f:
            f.write(b'\0' * 32)
Example #36
    def dump_data(self,
                  engine,
                  sources,
                  path,
                  tmin=None,
                  tmax=None,
                  overwrite=False):
        path_gnss = op.join(path, 'gnss')
        util.ensuredir(path_gnss)

        campaigns = self.get_gnss_campaign(engine, sources, tmin, tmax)

        fn = op.join(path_gnss,
                     'campaign-%s.yml' % self.station_generator.network_name)

        with open(fn, 'w') as f:
            for camp in campaigns:
                camp.dump(stream=f)

        return [fn]
Example #37
    def setUpClass(cls):
        from matplotlib import pyplot as plt

        cls.cwd = os.getcwd()
        cls.run_dir = tutorial_run_dir()
        util.ensuredir(cls.run_dir)
        cls.dn = open(os.devnull, 'w')
        sys.stdout = cls.dn
        os.chdir(cls.run_dir)

        plt.show_orig_testex = plt.show
        plt.show = noop

        snuffler.snuffle_orig_testex = snuffler.snuffle
        snuffler.snuffle = noop

        cls._show_progress_force_off_orig = pile.show_progress_force_off
        pile.show_progress_force_off = True

        orthodrome.raise_if_slow_path_contains_points = True
Example #38
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None):

        self.store = gf.store.Store(store_dir, 'w')

        if block_size is None:
            block_size = (1, 1, 2000)

        if len(self.store.config.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(self,
                                    self.store.config,
                                    step,
                                    block_size=block_size)

        baseconf = self.store.get_extra('ahfullgreen')

        conf = AhfullgreenConfigFull(**baseconf.items())
        conf.earthmodel_1d = self.store.config.earthmodel_1d
        deltat = 1.0 / self.store.config.sample_rate
        conf.deltat = deltat

        if 'time_window_min' not in shared:
            d = self.store.make_timing_params(conf.time_region[0],
                                              conf.time_region[1])

            shared['time_window_min'] = d['tlenmax_vred']

        time_window_min = shared['time_window_min']

        conf.nsamples = nextpow2(int(round(time_window_min / deltat)) + 1)

        self.ahfullgreen_config = conf

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)
Example #39
    def __init__(self, config, hypers=False):

        self.model = None

        self._like_name = 'like'

        self.fixed_params = {}
        self.composites = {}
        self.hyperparams = {}

        logger.info('Analysing problem ...')
        logger.info('---------------------\n')

        # Load events
        if config.event is None:
            logger.warning('Found no event information!')
            raise AttributeError('Problem config has no event information!')
        else:
            self.event = config.event
            nsubevents = len(config.subevents)
            self.subevents = config.subevents

            if nsubevents > 0:
                logger.info('Found %i subevents.' % nsubevents)

        self.config = config

        mode = self.config.problem_config.mode

        outfolder = os.path.join(self.config.project_dir, mode)

        if hypers:
            outfolder = os.path.join(outfolder, 'hypers')

        self.outfolder = outfolder
        util.ensuredir(self.outfolder)
Example #40
def process_event(ievent, g_data_id):

    environment, force, preserve, status, nparallel = \
        g_state[g_data_id]

    config = environment.get_config()
    event_name = environment.get_selected_event_names()[ievent]
    nevents = environment.nevents_selected
    tstart = time.time()

    ds = config.get_dataset(event_name)
    event = ds.get_event()
    problem = config.get_problem(event)

    synt = ds.synthetic_test
    if synt:
        problem.base_source = problem.get_source(synt.get_x())

    check_problem(problem)

    rundir = expand_template(
        config.rundir_template,
        dict(problem_name=problem.name))
    environment.set_rundir_path(rundir)

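    # An existing rundir is moved aside (preserve), removed (force) or skipped.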
    if op.exists(rundir):
        if preserve:
            nold_rundirs = len(glob.glob(rundir + '*'))
            shutil.move(rundir, rundir+'-old-%d' % (nold_rundirs))
        elif force:
            shutil.rmtree(rundir)
        else:
            logger.warning(
                'Skipping problem "%s": rundir already exists: %s'
                % (problem.name, rundir))
            return

    util.ensuredir(rundir)

    logger.info(
        'Starting event %i / %i' % (ievent+1, nevents))

    logger.info('Rundir: %s' % rundir)

    logger.info('Analysing problem "%s".' % problem.name)

    for analyser_conf in config.analyser_configs:
        analyser = analyser_conf.get_analyser()
        analyser.analyse(problem, ds)

    basepath = config.get_basepath()
    config.change_basepath(rundir)
    guts.dump(config, filename=op.join(rundir, 'config.yaml'))
    config.change_basepath(basepath)

    optimiser = config.optimiser_config.get_optimiser()
    optimiser.init_bootstraps(problem)
    problem.dump_problem_info(rundir)

    monitor = None
    if status == 'state':
        monitor = GrondMonitor.watch(rundir)

    xs_inject = None
    synt = ds.synthetic_test
    if synt and synt.inject_solution:
        xs_inject = synt.get_x()[num.newaxis, :]

    try:
        if xs_inject is not None:
            from .optimisers import highscore
            if not isinstance(optimiser, highscore.HighScoreOptimiser):
                raise GrondError(
                    'Optimiser does not support injections.')

            optimiser.sampler_phases[0:0] = [
                highscore.InjectionSamplerPhase(xs_inject=xs_inject)]

        optimiser.optimise(
            problem,
            rundir=rundir)

        harvest(rundir, problem, force=True)

    except BadProblem as e:
        logger.error(str(e))

    except GrondError as e:
        logger.error(str(e))

    finally:
        if monitor:
            monitor.terminate()

    tstop = time.time()
    logger.info(
        'Stop %i / %i (%g min)' % (ievent+1, nevents, (tstop - tstart)/60.))

    logger.info(
        'Done with problem "%s", rundir is "%s".' % (problem.name, rundir))
Example #42
    def __init__(self, path, engine):
        self._scenario_suffix = 'scenario'
        self._path = path
        util.ensuredir(self._path)
        self._engine = engine
        self._load_scenarios()
Example #43
def sample(step, problem):
    """
    Sample solution space with the previously initialised algorithm.

    Parameters
    ----------

    step : :class:`SMC` or :class:`pymc3.metropolis.Metropolis`
        from problem.init_sampler()
    problem : :class:`Problem` with characteristics of problem to solve
    """

    sc = problem.config.sampler_config
    pa = sc.parameters

    update = None
    if hasattr(pa, 'update_covariances') and pa.update_covariances:
        update = problem

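    # Dispatch to the sampler named in the sampler config.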
    if sc.name == 'Metropolis':
        logger.info('... Starting Metropolis ...\n')

        ensuredir(problem.outfolder)

        sampler.metropolis_sample(
            n_steps=pa.n_steps,
            step=step,
            progressbar=sc.progressbar,
            buffer_size=sc.buffer_size,
            homepath=problem.outfolder,
            burn=pa.burn,
            thin=pa.thin,
            model=problem.model,
            n_jobs=pa.n_jobs,
            rm_flag=pa.rm_flag)

    elif sc.name == 'SMC':
        logger.info('... Starting SMC ...\n')

        sampler.smc_sample(
            pa.n_steps,
            step=step,
            progressbar=sc.progressbar,
            model=problem.model,
            n_jobs=pa.n_jobs,
            stage=pa.stage,
            update=update,
            homepath=problem.outfolder,
            buffer_size=sc.buffer_size,
            rm_flag=pa.rm_flag)

    elif sc.name == 'PT':
        logger.info('... Starting Parallel Tempering ...\n')

        sampler.pt_sample(
            step=step,
            n_chains=pa.n_chains,
            n_samples=pa.n_samples,
            swap_interval=pa.swap_interval,
            beta_tune_interval=pa.beta_tune_interval,
            n_workers_posterior=pa.n_chains_posterior,
            homepath=problem.outfolder,
            progressbar=sc.progressbar,
            buffer_size=sc.buffer_size,
            model=problem.model,
            rm_flag=pa.rm_flag)

    else:
        logger.error('Sampler "%s" not implemented.' % sc.name)
Example #44
def estimate_hypers(step, problem):
    """
    Get initial estimates of the hyperparameters
    """
    from beat.sampler.base import iter_parallel_chains, init_stage, \
        init_chain_hypers

    logger.info('... Estimating hyperparameters ...')

    pc = problem.config.problem_config
    sc = problem.config.hyper_sampler_config
    pa = sc.parameters

    name = problem.outfolder
    ensuredir(name)

    stage_handler = TextStage(problem.outfolder)
    chains, step, update = init_stage(
        stage_handler=stage_handler,
        step=step,
        stage=0,
        progressbar=sc.progressbar,
        model=problem.model,
        rm_flag=pa.rm_flag)

    # setting stage to 1 otherwise only one sample
    step.stage = 1
    step.n_steps = pa.n_steps

    with problem.model:
        mtrace = iter_parallel_chains(
            draws=pa.n_steps,
            chains=chains,
            step=step,
            stage_path=stage_handler.stage_path(1),
            progressbar=sc.progressbar,
            model=problem.model,
            n_jobs=pa.n_jobs,
            initializer=init_chain_hypers,
            initargs=(problem,),
            buffer_size=sc.buffer_size,
            chunksize=int(pa.n_chains / pa.n_jobs))

    for v, i in pc.hyperparameters.items():
        d = mtrace.get_values(
            v, combine=True, burn=int(pa.n_steps * pa.burn),
            thin=pa.thin, squeeze=True)

        lower = num.floor(d.min()) - 2.
        upper = num.ceil(d.max()) + 2.
        logger.info('Updating hyperparameter %s from %f, %f to %f, %f' % (
            v, i.lower, i.upper, lower, upper))
        pc.hyperparameters[v].lower = num.atleast_1d(lower)
        pc.hyperparameters[v].upper = num.atleast_1d(upper)
        pc.hyperparameters[v].testvalue = num.atleast_1d((upper + lower) / 2.)

    config_file_name = 'config_' + pc.mode + '.yaml'
    conf_out = os.path.join(problem.config.project_dir, config_file_name)

    problem.config.problem_config = pc
    bconfig.dump(problem.config, filename=conf_out)
Example #45
File: qseis2d.py Project: emolch/pyrocko
    def __init__(self, store_dir, step, shared, block_size=None, tmp=None,
                 force=False):

        self.store = gf.store.Store(store_dir, 'w')

        storeconf = self.store.config

        if step == 0:
            block_size = (1, 1, storeconf.ndistances)
        else:
            if block_size is None:
                block_size = (1, 1, 1)  # QSeisR does only allow one receiver

        if len(storeconf.ns) == 2:
            block_size = block_size[1:]

        gf.builder.Builder.__init__(
            self, storeconf, step, block_size=block_size)

        baseconf = self.store.get_extra('qseis2d')

        conf_s = QSeisSConfigFull(**baseconf.qseis_s_config.items())
        conf_r = QSeisRConfigFull(**baseconf.qseis_r_config.items())

        conf_s.earthmodel_1d = storeconf.earthmodel_1d
        if storeconf.earthmodel_receiver_1d is not None:
            conf_r.earthmodel_receiver_1d = \
                storeconf.earthmodel_receiver_1d

        else:
            conf_r.earthmodel_receiver_1d = \
                storeconf.earthmodel_1d.extract(
                    depth_max='moho')
            # depth_max=conf_s.receiver_basement_depth*km)

        deltat = 1.0 / self.gf_config.sample_rate

        if 'time_window_min' not in shared:
            d = self.store.make_timing_params(
                baseconf.time_region[0], baseconf.time_region[1],
                force=force)

            shared['time_window_min'] = float(
                    num.ceil(d['tlenmax'] / self.gf_config.sample_rate) *
                    self.gf_config.sample_rate)
            shared['time_reduction'] = d['tmin_vred']

        time_window_min = shared['time_window_min']

        conf_s.nsamples = nextpow2(int(round(time_window_min / deltat)) + 1)
        conf_s.time_window = (conf_s.nsamples - 1) * deltat
        conf_r.time_reduction = shared['time_reduction']

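        # In step 0, derive the slowness window from phase arrivals if requested.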
        if step == 0:
            if 'slowness_window' not in shared:
                if conf_s.calc_slowness_window:
                    phases = [
                        storeconf.tabulated_phases[i].phases
                        for i in range(len(
                            storeconf.tabulated_phases))]

                    all_phases = []
                    for phase_group in phases:
                        all_phases.extend(phase_group)

                    mean_source_depth = num.mean((
                        storeconf.source_depth_min,
                        storeconf.source_depth_max))

                    arrivals = conf_s.earthmodel_1d.arrivals(
                        phases=all_phases,
                        distances=num.linspace(
                            conf_s.receiver_min_distance,
                            conf_s.receiver_max_distance,
                            100) * cake.m2d,
                        zstart=mean_source_depth)

                    ps = num.array(
                        [arrivals[i].p for i in range(len(arrivals))])

                    slownesses = ps / (cake.r2d * cake.d2m / km)

                    shared['slowness_window'] = (0.,
                                                 0.,
                                                 1.1 * float(slownesses.max()),
                                                 1.3 * float(slownesses.max()))

                else:
                    shared['slowness_window'] = conf_s.slowness_window

            conf_s.slowness_window = shared['slowness_window']

        self.qseis_s_config = conf_s
        self.qseis_r_config = conf_r
        self.qseis_baseconf = baseconf

        self.tmp = tmp
        if self.tmp is not None:
            util.ensuredir(self.tmp)

        util.ensuredir(baseconf.gf_directory)
Example #46
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an interruption')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add selection file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='Extend traces by zero-padding if clean restitution requires '
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = args[0]
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except Exception:
        parser.print_help()
        sys.exit(1)

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)
            sys.exit(1)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)
            sys.exit(1)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)
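    # ftap holds the four corner frequencies (f1, f2, f3, f4) of the
    # frequency-domain taper applied during restitution below: the passband
    # is flat between f2 and f3 and tapers to zero towards f1 and f4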

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        # keep None placeholders so that sxs stays aligned with sites
        # in the zip() loops below
        sxs.append(sx)

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    # use the first site that has this station
                    nsl_to_station[nsl] = s

    logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = tmax_ + tpad

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / float(channel.sample_rate.value)
                        except AttributeError:
                            deltat = 1.0 / float(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend the time window by some samples, because
                        # otherwise gaps are sometimes produced; the web
                        # services appear to be sensitive to full seconds
                        # only, so round to avoid gaps and increase the
                        # safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                for (net, sta, loc, cha, tmin_, tmax_) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warning('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warning(
                            'an error occurred while downloading data '
                            'for channels\n  %s' % '\n  '.join(
                                '.'.join(x[:4]) for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % tr)
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warning('failed to restitute trace %s.%s.%s.%s (%s)' %
                               (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
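
For orientation, a hypothetical invocation of the script above, with the ten
positional arguments in the order the parser consumes them (project directory,
date, time, latitude, longitude, depth [km], radius [km], fmin [Hz], sample
rate [Hz], event name); the script name and all values are illustrative:

    python beatdown.py myproject 2019-11-20 01:00:00 49.0 8.1 10.0 \
        100.0 0.01 20.0 ev001 --sites=geofon,iris --nstations-wanted=30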
Example #47
def report(env,
           report_config=None,
           update_without_plotting=True,
           make_index=True,
           make_archive=True,
           nthreads=0):

    if report_config is None:
        report_config = ReportConfig()
        report_config.set_basepath('.')

    event_name = env.get_current_event_name()
    logger.info('Creating report entry for run "%s"...' % event_name)

    fp = report_config.expand_path
    entry_path = expand_template(
        op.join(fp(report_config.report_base_path),
                report_config.entries_sub_path),
        dict(event_name=event_name, problem_name=event_name))

    if op.exists(entry_path) and not update_without_plotting:
        shutil.rmtree(entry_path)

    util.ensuredir(entry_path)
    plots_dir_out = op.join(entry_path, 'plots')
    util.ensuredir(plots_dir_out)
    configs_dir = op.join(op.split(__file__)[0], 'app/configs/')
    rundir_path = env.get_rundir_path()
    os.system("cp -r %s/grun/plots/* %s" % (rundir_path, plots_dir_out))
    os.system("cp %s/plot_collection.yaml %s" % (configs_dir, plots_dir_out))

    util.ensuredir("%s/shakemap/default/" % (plots_dir_out))
    os.system(
        "cp %s/*shakemap.png %s/shakemap/default/shakemap.default.gf_shakemap.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/shakemap.default.plot_group.yaml %s/shakemap/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/location/default/" % (plots_dir_out))
    os.system(
        "cp %s/*location.png %s/location/default/location.default.location.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/location.default.plot_group.yaml %s/location/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/production_data/default/" % (plots_dir_out))
    os.system(
        "cp %s/*production_data.png %s/production_data/default/production_data.default.production_data.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/production_data.default.plot_group.yaml %s/production_data/default/"
        % (configs_dir, plots_dir_out))

    util.ensuredir("%s/waveforms/default/" % (plots_dir_out))
    os.system(
        "cp %s/waveforms_1.png %s/waveforms/default/waveforms.default.waveforms_1.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_2.png %s/waveforms/default/waveforms.default.waveforms_2.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_3.png %s/waveforms/default/waveforms.default.waveforms_3.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/waveforms.default.plot_group.yaml %s/waveforms/default/" %
              (configs_dir, plots_dir_out))

    os.system("cp %s/grun/config.yaml %s/config.yaml" %
              (rundir_path, entry_path))

    try:

        event = model.load_events(op.join(rundir_path, "event.txt"))
        model.dump_events(event,
                          filename=op.join(entry_path, 'event_reference.yaml'),
                          format="yaml")
        event = event[0]
        from silvertine import plot
        pcc = report_config.plot_config_collection.get_weeded(env)
        plot.make_plots(env,
                        plots_path=op.join(entry_path, 'plots'),
                        plot_config_collection=pcc)

        try:
            run_info = env.get_run_info()
        except environment.NoRundirAvailable:
            run_info = None

        rie = ReportIndexEntry(path='.',
                               problem_name=event_name,
                               silvertine_version="0.01",
                               run_info=run_info)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_best = guts.load(filename=fn)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_reference = guts.load(filename=fn)

        fn = op.join(entry_path, 'index.yaml')
        guts.dump(rie, filename=fn)

        logger.info('Done creating report entry for run "%s".' % event_name)

    #    report_index(report_config)

    #    if make_archive:
    #        report_archive(report_config)
    except FileNotFoundError:
        pass
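
The report function above shells out to cp for every file it copies. A
portable alternative (a sketch under the same semantics, not part of the
original code) would use glob and shutil:

    import glob
    import shutil

    def copy_matching(pattern, dest):
        # copy every file matching the glob pattern to dest; dest may be a
        # directory or, for a single match, an explicit target filename
        for fn in glob.glob(pattern):
            shutil.copy(fn, dest)

    # e.g. instead of os.system("cp %s/*shakemap.png ..." % rundir_path):
    # copy_matching(op.join(rundir_path, '*shakemap.png'),
    #               op.join(plots_dir_out, 'shakemap', 'default',
    #                       'shakemap.default.gf_shakemap.d100.png'))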
Example #48
    def ensure_gf_directory(self):
        util.ensuredir(self.gf_directory)
Example #49
    def __init__(self, base_dir):
        self.base_dir = base_dir
        self.project_dir = os.path.dirname(base_dir)
        self.mode = os.path.basename(base_dir)
        util.ensuredir(self.base_dir)
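
This constructor assumes the run mode is encoded in the last component of
base_dir; a quick illustration of the convention (the path is hypothetical):

    # base_dir = '/home/user/myproject/geometry'
    # -> project_dir = '/home/user/myproject'
    # -> mode = 'geometry'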
Example #50
def download_raw(path,
                 tmint,
                 tmaxt,
                 seiger=True,
                 selection=None,
                 providers=["bgr", "http://ws.gpi.kit.edu"],
                 clean=True,
                 detector=False,
                 common_f=80,
                 tinc=None):
    try:
        tmin = util.stt(tmint)
        tmax = util.stt(tmaxt)
    except Exception:
        # tmint/tmaxt may already be given as timestamps
        tmin = tmint
        tmax = tmaxt
    util.ensuredir(path + "/downloads")
    for provider in providers:
        if clean is True and detector is True:
            subprocess.run('rm -r %s*' % (path + '/downloads/'), shell=True)
        if seiger is True:
            selection = get_seiger_stations(tmin, tmax)

        request_waveform = fdsn.dataselect(site=provider, selection=selection)
        # write the incoming data stream to 'traces_<provider>.mseed'
        if provider == "http://ws.gpi.kit.edu":
            provider = "kit"
        if provider == "http://192.168.11.220:8080":
            provider = "bgr"
        download_basepath = os.path.join(path, "traces_%s.mseed" % provider)

        with open(download_basepath, 'wb') as file:
            file.write(request_waveform.read())

        traces = io.load(download_basepath)
        if common_f is not None:
            for tr in traces:
                # common_f is a target rate in Hz; compare and downsample
                # via the corresponding sampling interval in seconds
                if tr.deltat != 1.0 / common_f:
                    tr.downsample_to(1.0 / common_f)
                    tr.ydata = tr.ydata.astype(num.int32)
        if detector is True:
            for tr in traces:
                tr.chop(tmin, tmax)
                date_min = get_time_format_eq(tr.tmin)
                date_max = get_time_format_eq(tr.tmax)
                io.save(
                    tr, "%s/downloads/%s/%s.%s..%s__%s__%s.mseed" %
                    (path, tr.station, tr.network, tr.station, tr.channel,
                     date_min, date_max))
        else:
            util.ensuredir("%s/downloads/" % path)
            window_start = traces[0].tmin
            window_end = traces[0].tmax
            timestring = util.time_to_str(window_start, format='%Y-%m')
            io.save(
                traces, "%s/%s/%s_%s_%s.mseed" %
                (path, timestring, provider, tmin, tmax))
    if clean is True:
        for provider in providers:
            if provider == "http://192.168.11.220:8080":
                provider = "bgr"
            if provider == "http://ws.gpi.kit.edu":
                provider = "kit"
            subprocess.run(
                'rm -r %s*' % (path + '/traces_%s.mseed' % provider),
                shell=True,
                stdout=DEVNULL,
                stderr=STDOUT)
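
A hypothetical call to this downloader (all values illustrative; the station
selection is built internally from get_seiger_stations() when seiger is True):

    download_raw(
        path='data/landau',
        tmint='2019-11-20 01:00:00.000',
        tmaxt='2019-11-20 02:00:00.000',
        seiger=True,
        clean=True,
        detector=False,
        common_f=80)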
Example #52
def Metropolis_sample(
        n_stages=10, n_steps=10000, trace=None, start=None,
        progressbar=False, stage=None, rm_flag=False,
        step=None, model=None, n_jobs=1, update=None, burn=0.5, thin=2):
    """
    Execute Metropolis algorithm repeatedly depending on the number of stages.
    The start point of each stage set to the end point of the previous stage.
    Update covariances if given.
    """

    model = pm.modelcontext(model)

    if step is None:
        raise Exception('Argument `step` has to be a TMCMC step object.')

    if trace is None:
        raise Exception('Argument `trace` should be path to result_directory.')

    if n_steps < 1:
        raise Exception('Argument `n_steps` should be above 0.')

    step.n_steps = int(n_steps)

    if n_jobs > 1:
        if not (step.n_chains / float(n_jobs)).is_integer():
            raise Exception('n_chains / n_jobs has to be a whole number!')

    if start is not None:
        if len(start) != step.n_chains:
            raise Exception('Argument `start` should have dicts equal the '
                            'number of chains (step.N-chains)')
        else:
            step.population = start

    if not any(
            step.likelihood_name in var.name for var in model.deterministics):
        raise Exception('Model (deterministic) variables need to contain '
                        'a variable %s as defined in `step`.' %
                        step.likelihood_name)

    homepath = trace

    util.ensuredir(homepath)

    chains, step, update = init_stage(
        homepath=homepath,
        step=step,
        stage=stage,
        n_jobs=n_jobs,
        progressbar=progressbar,
        update=update,
        model=model,
        rm_flag=rm_flag)

    # set beta to 1 - standard Metropolis sampling
    step.beta = 1.
    step.n_jobs = n_jobs

    with model:

        for s in range(int(stage), n_stages):

            stage_path = os.path.join(homepath, 'stage_%i' % s)
            logger.info('Sampling stage %s' % stage_path)

            if s == 0:
                draws = 1
            else:
                draws = n_steps

            if not os.path.exists(stage_path):
                chains = None

            step.stage = s

            sample_args = {
                'draws': draws,
                'step': step,
                'stage_path': stage_path,
                'progressbar': progressbar,
                'model': model,
                'n_jobs': n_jobs,
                'chains': chains}

            _iter_parallel_chains(**sample_args)

            mtrace = backend.load(stage_path, model)

            step.population, step.array_population, step.likelihoods = \
                step.select_end_points(mtrace)

            pdict, step.covariance = get_trace_stats(
                mtrace, step, burn, thin)

            if step.proposal_name == 'MultivariateNormal':
                step.proposal_dist = choose_proposal(
                    step.proposal_name, scale=step.covariance)

            if update is not None:
                logger.info('Updating Covariances ...')
                update.update_weights(pdict['dist_mean'], n_jobs=n_jobs)

                mtrace = update_last_samples(
                    homepath, step, progressbar, model, n_jobs, rm_flag)

                # close cached stores after the update of the initial stage
                if stage == 0:
                    update.engine.close_cashed_stores()

            step.chain_previous_lpoint = step.get_chain_previous_lpoint(mtrace)

            outpath = os.path.join(stage_path, sample_p_outname)
            outparam_list = [step, update]
            utility.dump_objects(outpath, outparam_list)

        get_final_stage(homepath, n_stages, model=model)
        outpath = os.path.join(homepath, 'stage_final', sample_p_outname)
        utility.dump_objects(outpath, outparam_list)
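
A minimal usage sketch, assuming `step` is an initialized TMCMC step object
and a pymc3 model context is available (all names and values illustrative):

    # with pm.Model() as pymc_model:
    #     ...  # must define a deterministic variable named step.likelihood_name
    #
    # Metropolis_sample(
    #     n_stages=5, n_steps=2000, trace='results/metropolis',
    #     step=step, model=pymc_model, n_jobs=2, burn=0.5, thin=2)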