Example No. 1
    def add_scenario(self, scenario_id, scenario_generator):

        if scenario_generator.seed is None:
            scenario_generator = guts.clone(scenario_generator)
            scenario_generator.seed = num.random.randint(1, 2**32 - 1)

        path = self.get_path(scenario_id)
        try:
            os.mkdir(path)
        except OSError as e:
            if e.errno == errno.EEXIST:
                raise ScenarioError('scenario id is already in use: %s' %
                                    scenario_id)
            else:
                raise

        scenario = ScenarioCollectionItem(scenario_id=scenario_id,
                                          time_created=time.time())

        scenario_path = self.get_path(scenario_id, 'scenario.yaml')
        guts.dump(scenario, filename=scenario_path)

        generator_path = self.get_path(scenario_id, 'generator.yaml')
        guts.dump(scenario_generator, filename=generator_path)

        scenario.set_base_path(self.get_path(scenario_id))
        scenario.init_modelling(self._engine)

        self._scenarios.append(scenario)
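
The clone-before-mutate step above keeps the caller's generator untouched: guts.clone deep-copies the object before the seed is assigned. A minimal sketch of that pattern, assuming pyrocko is installed; ExampleGen is a hypothetical class:

import numpy as num
from pyrocko.guts import Object, Int, clone

class ExampleGen(Object):
    seed = Int.T(optional=True)

gen = ExampleGen()
if gen.seed is None:
    seeded = clone(gen)  # deep copy; the caller's object is not mutated
    seeded.seed = num.random.randint(1, 2**32 - 1)

assert gen.seed is None and seeded.seed is not None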
Example No. 2
def load_config(project_dir, mode):
    """
    Load configuration file.

    Parameters
    ----------
    project_dir : str
        path to the directory of the configuration file
    mode : str
        type of optimization problem: 'Geometry' / 'Static' / 'Kinematic'

    Returns
    -------
    :class:`BEATconfig`
    """
    config_file_name = 'config_' + mode + '.yaml'

    config_fn = os.path.join(project_dir, config_file_name)
    config = load(filename=config_fn)

    if config.problem_config.hyperparameters is None or \
        len(config.problem_config.hyperparameters) == 0:
        config.update_hypers()
        logger.info('Updated hyper parameters!')
        dump(config, filename=config_fn)

    return config
Example No. 3
def load_config(project_dir, mode, update=False):
    """
    Load configuration file.

    Parameters
    ----------
    project_dir : str
        path to the directory of the configuration file
    mode : str
        type of optimization problem: 'geometry' / 'static' / 'kinematic'
    update : bool
        if True, update the hyperparameters and rewrite the
        configuration file

    Returns
    -------
    :class:`BEATconfig`
    """
    config_file_name = 'config_' + mode + '.yaml'

    config_fn = os.path.join(project_dir, config_file_name)

    try:
        config = load(filename=config_fn)
    except IOError:
        raise IOError('Cannot load config, file %s'
                      ' does not exist!' % config_fn)

    config.problem_config.validate_priors()

    if update:
        config.update_hypers()
        logger.info('Updated hyper parameters! Previous hyper'
                    ' parameter bounds are invalid now!')
        dump(config, filename=config_fn)

    return config
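
A hedged usage sketch for the loader above; the project path is a placeholder and a config_geometry.yaml is assumed to exist in it:

# hypothetical call: reads <project_dir>/config_geometry.yaml, validates
# the priors and, with update=True, refreshes the hyperparameters and
# rewrites the file
config = load_config('/path/to/project', 'geometry', update=True)
print(config.problem_config.mode)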
Example No. 4
    def testIOEvent(self):
        tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
        fn = pjoin(tempdir, 'event.txt')
        e1 = model.Event(10.,
                         20.,
                         1234567890.,
                         'bubu',
                         depth=10.,
                         region='taka tuka land',
                         moment_tensor=moment_tensor.MomentTensor(strike=45.,
                                                                  dip=90),
                         magnitude=5.1,
                         magnitude_type='Mw',
                         tags=['cluster:-1', 'custom_magnitude:2.5'])

        guts.dump(e1, filename=fn)
        e2 = guts.load(filename=fn)
        assert e1.region == e2.region
        assert e1.name == e2.name
        assert e1.lat == e2.lat
        assert e1.lon == e2.lon
        assert e1.time == e2.time
        assert e1.region == e2.region
        assert e1.magnitude == e2.magnitude
        assert e1.magnitude_type == e2.magnitude_type
        assert e1.get_hash() == e2.get_hash()
        assert e1.tags == e2.tags

        fn2 = pjoin(tempdir, 'events.txt')
        guts.dump_all([e1, e2], filename=fn2)

        with self.assertRaises(model.OneEventRequired):
            model.load_one_event(fn2)

        shutil.rmtree(tempdir)
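
Besides the single-object round trip, the test above also touches multi-document YAML via guts.dump_all. A minimal sketch of that API, assuming pyrocko is installed; Note is a hypothetical class:

from pyrocko.guts import Object, String, dump_all, load_all

class Note(Object):
    text = String.T(default='')

dump_all([Note(text='a'), Note(text='b')], filename='notes.yaml')
assert [n.text for n in load_all(filename='notes.yaml')] == ['a', 'b']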
Example No. 5
    def dump(x, gm, indices):
        if type == 'vector':
            print(' ',
                  ' '.join('%16.7g' % problem.extract(x, i) for i in indices),
                  '%16.7g' % gm,
                  file=out)

        elif type == 'source':
            source = problem.get_source(x)
            if effective_lat_lon:
                source.set_origin(*source.effective_latlon)
            guts.dump(source, stream=out)

        elif type == 'event':
            ev = problem.get_source(x).pyrocko_event()
            if effective_lat_lon:
                ev.set_origin(*ev.effective_latlon)

            model.dump_events([ev], stream=out)

        elif type == 'event-yaml':
            ev = problem.get_source(x).pyrocko_event()
            if effective_lat_lon:
                ev.set_origin(*ev.effective_latlon)
            guts.dump_all([ev], stream=out)

        else:
            raise GrondError('Invalid argument: type=%s' % repr(type))
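
Note the stream= form of guts.dump used above: it writes the YAML document to an already-open file object instead of a path. A minimal sketch, assuming pyrocko is installed; Note is a hypothetical class:

import sys
from pyrocko.guts import Object, Float, dump

class Note(Object):
    value = Float.T(default=0.0)

dump(Note(value=1.5), stream=sys.stdout)  # YAML document goes to the stream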
Example No. 6
def report_index(report_config=None):
    if report_config is None:
        report_config = ReportConfig()

    reports_base_path = report_config.reports_base_path
    reports = []
    for report_path in iter_report_dirs(reports_base_path):

        fn = op.join(report_path, 'index.yaml')
        if not os.path.exists(fn):
            logger.warn('Skipping indexing of incomplete report: %s' %
                        report_path)

            continue

        logger.info('Indexing %s...' % report_path)

        rie = guts.load(filename=fn)
        report_relpath = op.relpath(report_path, reports_base_path)
        rie.path = report_relpath
        reports.append(rie)

    guts.dump_all(reports,
                  filename=op.join(reports_base_path, 'report_list.yaml'))

    from grond import info
    guts.dump(info.version_info(),
              filename=op.join(reports_base_path, 'version_info.yaml'))

    app_dir = op.join(op.split(__file__)[0], 'app')
    copytree(app_dir, reports_base_path)
    logger.info('Created report in %s/index.html' % reports_base_path)
Example No. 7
def report_index(report_config=None):
    if report_config is None:
        report_config = ReportConfig()

    report_base_path = report_config.report_base_path
    entries = []
    for entry_path in iter_report_entry_dirs(report_base_path):
        fn = op.join(entry_path, 'index.yaml')
        if not os.path.exists(fn):
            logger.warn('Skipping indexing of incomplete report entry: %s' %
                        entry_path)

            continue

        logger.info('Indexing %s...' % entry_path)

        rie = guts.load(filename=fn)
        report_relpath = op.relpath(entry_path, report_base_path)
        rie.path = report_relpath
        entries.append(rie)

    guts.dump_all(entries,
                  filename=op.join(report_base_path, 'report_list.yaml'))

    guts.dump(ReportInfo(title=report_config.title,
                         description=report_config.description,
                         have_archive=report_config.make_archive),
              filename=op.join(report_base_path, 'info.yaml'))

    app_dir = op.join(op.split(__file__)[0], 'app')
    copytree(app_dir, report_base_path)
    logger.info('Created report in %s/index.html' % report_base_path)
Example No. 8
    def testIOEvent(self):
        tempdir = tempfile.mkdtemp(prefix='pyrocko-model')
        fn = pjoin(tempdir, 'event.txt')
        e1 = model.Event(
            10., 20., 1234567890., 'bubu', region='taka tuka land',
            moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
            magnitude=5.1, magnitude_type='Mw')
        guts.dump(e1, filename=fn)
        e2 = guts.load(filename=fn)
        assert e1.region == e2.region
        assert e1.name == e2.name
        assert e1.lat == e2.lat
        assert e1.lon == e2.lon
        assert e1.time == e2.time
        assert e1.region == e2.region
        assert e1.magnitude == e2.magnitude
        assert e1.magnitude_type == e2.magnitude_type
        assert e1.get_hash() == e2.get_hash()

        fn2 = pjoin(tempdir, 'events.txt')
        guts.dump_all([e1, e2], filename=fn2)

        with self.assertRaises(model.OneEventRequired):
            model.load_one_event(fn2)

        shutil.rmtree(tempdir)
Example No. 9
    def add_scenario(self, scenario_id, scenario_generator):

        if scenario_generator.seed is None:
            scenario_generator = guts.clone(scenario_generator)
            scenario_generator.seed = num.random.randint(1, 2**32-1)

        path = self.get_path(scenario_id)
        try:
            os.mkdir(path)
        except OSError as e:
            if e.errno == errno.EEXIST:
                raise ScenarioError(
                    'scenario id is already in use: %s' % scenario_id)
            else:
                raise

        scenario = ScenarioCollectionItem(
            scenario_id=scenario_id,
            time_created=time.time())

        scenario_path = self.get_path(scenario_id, 'scenario.yaml')
        guts.dump(scenario, filename=scenario_path)

        generator_path = self.get_path(scenario_id, 'generator.yaml')
        guts.dump(scenario_generator, filename=generator_path)

        scenario.set_base_path(self.get_path(scenario_id))
        scenario.init_modelling(self._engine)

        self._scenarios.append(scenario)
Example No. 10
def write_info(info, path):
    try:
        guts.dump(info,
                  filename=path,
                  header='Grond run info file, version %s' % __version__)

    except OSError:
        raise GrondError('Cannot write Grond run info file: %s' % path)
Example No. 11
def write_config(config, path):
    try:
        guts.dump(config,
                  filename=path,
                  header='Grond clustering configuration file, version %s' %
                  __version__)

    except OSError:
        raise GrondError('cannot write Grond clustering configuration file: '
                         '%s' % path)
Example No. 12
def well_geometry_sparrow_export(file, folder, name):
    from pyproj import Proj
    from pyrocko.model import Geometry
    from pyrocko.guts import dump
    data = num.loadtxt(file, delimiter=",")
    utmx = data[:, 0]
    utmy = data[:, 1]
    z = data[:, 2]

    proj_gk4 = Proj(init="epsg:31467")  # GK-projection
    lons, lats = proj_gk4(utmx, utmy, inverse=True)
    ev = event.Event(lat=num.mean(lats), lon=num.mean(lons), depth=num.mean(z),
                     time=util.str_to_time("2000-01-01 01:01:01"))

    ncorners = 4
    verts = []
    xs = []
    ys = []
    dist = 200.
    for i in range(0, len(z)):
        try:
            x = lats[i]
            y = lons[i]
            x1 = lats[i+1]
            y1 = lons[i+1]
            depth = z[i]
            depth1 = z[i+1]
            xyz = ([dist/2.8, dist/2.8, depth], [dist/2.8, dist/2.8, depth1],
                   [dist/2.8, -dist/2.8, depth1], [dist/2.8, -dist/2.8, depth])
            latlon = ([x, y], [x1, y1], [x1, y1], [x, y])
            patchverts = num.hstack((latlon, xyz))
            verts.append(patchverts)
        except IndexError:
            # the last sample has no successor point; skip it
            pass

    vertices = num.vstack(verts)

    npatches = len(vertices)
    faces1 = num.arange(ncorners * npatches, dtype='int64').reshape(
        npatches, ncorners)
    faces2 = num.fliplr(faces1)
    faces = num.hstack((faces2, faces1))
    srf_semblance_list = []

    srf_semblance = num.ones(num.shape(data[:, 2]))
    srf_semblance = duplicate_property(srf_semblance)  # helper assumed defined elsewhere in the module
    srf_semblance_list.append(srf_semblance)

    srf_semblance = num.asarray(srf_semblance_list).T
    srf_times = num.linspace(0, 1, 1)
    geom = Geometry(times=srf_times, event=ev)
    geom.setup(vertices, faces)
    sub_headers = tuple([str(i) for i in srf_times])
    geom.add_property(('semblance', 'float64', sub_headers), srf_semblance)
    dump(geom, filename='geom_gtla1.yaml')
Example No. 13
def dump_config(config):
    """
    Dump configuration file.

    Parameters
    ----------
    config : :class:`BEATConfig`
    """
    config_file_name = 'config_' + config.problem_config.mode + '.yaml'
    conf_out = os.path.join(config.project_dir, config_file_name)
    dump(config, filename=conf_out)
Example No. 14
def write_config(config, path):
    try:
        basepath = config.get_basepath()
        dirname = op.dirname(path) or '.'
        config.change_basepath(dirname)
        guts.dump(config,
                  filename=path,
                  header='Grond configuration file, version %s' % __version__)

        config.change_basepath(basepath)

    except OSError:
        raise GrondError('Cannot write Grond configuration file: %s' % path)
Example No. 15
def load_config(project_dir, mode, update=[]):
    """
    Load configuration file.

    Parameters
    ----------
    project_dir : str
        path to the directory of the configuration file
    mode : str
        type of optimization problem: 'geometry' / 'static' / 'kinematic'
    update : list
        parameter groups to update: 'hypers' and/or 'hierarchicals'

    Returns
    -------
    :class:`BEATconfig`
    """
    updates_avail = ['hierarchicals', 'hypers']

    config_file_name = 'config_' + mode + '.yaml'

    config_fn = os.path.join(project_dir, config_file_name)

    try:
        config = load(filename=config_fn)
    except IOError:
        raise IOError('Cannot load config, file %s'
                      ' does not exist!' % config_fn)

    config.problem_config.validate_priors()

    for upd in update:
        if upd not in updates_avail:
            raise TypeError('Update not available for "%s"' % upd)

    if len(update) > 0:
        if 'hypers' in update:
            config.update_hypers()
            logger.info('Updated hyper parameters! Previous hyper'
                        ' parameter bounds are invalid now!')

        if 'hierarchicals' in update:
            config.update_hierarchicals()
            logger.info('Updated hierarchicals.')

        dump(config, filename=config_fn)

    return config
Example No. 16
    def testSObject(self):

        class DotName(SObject):
            network = String.T()
            station = String.T()

            def __init__(self, s=None, **kwargs):
                if s is not None:
                    network, station = s.split('.')
                    kwargs = dict(network=network, station=station)

                SObject.__init__(self, **kwargs)

            def __str__(self):
                return '.'.join((self.network, self.station))

        class X(Object):
            xmltagname = 'root'
            dn = DotName.T()

        x = X(dn=DotName(network='abc', station='def'))
        self.assertEqual(x.dn.network, 'abc')
        self.assertEqual(x.dn.station, 'def')
        x = load_string(dump(x))
        self.assertEqual(x.dn.network, 'abc')
        self.assertEqual(x.dn.station, 'def')
        x = load_xml_string(dump_xml(x))
        self.assertEqual(x.dn.network, 'abc')
        self.assertEqual(x.dn.station, 'def')
Example No. 17
    def testIOEvent(self):
        tempdir = tempfile.mkdtemp()
        fn = pjoin(tempdir, 'event.txt')
        e1 = model.Event(
            10., 20., 1234567890., 'bubu', region='taka tuka land',
            moment_tensor=moment_tensor.MomentTensor(strike=45., dip=90),
            magnitude=5.1, magnitude_type='Mw')
        guts.dump(e1, filename=fn)
        e2 = guts.load(filename=fn)
        assert e1.region == e2.region
        assert e1.name == e2.name
        assert e1.lat == e2.lat
        assert e1.lon == e2.lon
        assert e1.time == e2.time
        assert e1.region == e2.region
        assert e1.magnitude == e2.magnitude
        assert e1.magnitude_type == e2.magnitude_type
        shutil.rmtree(tempdir)
Example No. 18
    def test_yaml(self):
        ydata = num.random.random(100)
        tmin = time.time()
        tr1 = trace.Trace(
            'N', 'STA1', '', 'BHZ', tmin=tmin, deltat=0.1, ydata=ydata)

        s = guts.dump(tr1)
        tr2 = guts.load_string(s)

        assert tr1 == tr2
Example No. 19
    def testIOEvent(self):
        tempdir = tempfile.mkdtemp()
        fn = pjoin(tempdir, 'event.txt')
        e1 = model.Event(10.,
                         20.,
                         1234567890.,
                         'bubu',
                         region='taka tuka land',
                         moment_tensor=moment_tensor.MomentTensor(strike=45.,
                                                                  dip=90),
                         magnitude=5.1,
                         magnitude_type='Mw')
        guts.dump(e1, filename=fn)
        e2 = guts.load(filename=fn)
        assert e1.region == e2.region
        assert e1.name == e2.name
        assert e1.lat == e2.lat
        assert e1.lon == e2.lon
        assert e1.time == e2.time
        assert e1.region == e2.region
        assert e1.magnitude == e2.magnitude
        assert e1.magnitude_type == e2.magnitude_type
        shutil.rmtree(tempdir)
Example No. 20
def upgrade_config_file(fn, diff=True, update=[]):
    rules = [
        ('beat.SeismicConfig', drop_attribute('blacklist')),
        ('beat.SeismicConfig', drop_attribute('calc_data_cov')),
        ('beat.SeismicConfig',
         set_attribute(
             'noise_estimator',
             aguts.load(string='''!beat.SeismicNoiseAnalyserConfig
                      structure: variance
                      pre_arrival_time: 5
                    '''), False)),
        ('beat.ProblemConfig',
         drop_attribute('dataset_specific_residual_noise_estimation')),
        ('beat.ProblemConfig', set_attribute('mode', 'ffi', 'ffo')),
        ('beat.WaveformFitConfig', set_attribute('preprocess_data', True,
                                                 True)),
        ('beat.WaveformFitConfig',
         set_attribute(
             'filterer',
             aguts.load(string='''
                    - !beat.heart.Filter
                      lower_corner: 0.01
                      upper_corner: 0.1
                      order: 4
                      '''))),
        ('beat.MetropolisConfig', drop_attribute('n_stages')),
        ('beat.MetropolisConfig', drop_attribute('stage')),
        ('beat.ParallelTemperingConfig', set_attribute('resample', False,
                                                       False)),
        ('beat.FFOConfig', rename_class('beat.FFIConfig')),
        ('beat.GeodeticLinearGFConfig', drop_attribute('extension_widths')),
        ('beat.GeodeticLinearGFConfig', drop_attribute('extension_lengths')),
        ('beat.GeodeticLinearGFConfig', drop_attribute('patch_widths')),
        ('beat.GeodeticLinearGFConfig', drop_attribute('patch_lengths')),
        ('beat.SeismicLinearGFConfig', drop_attribute('extension_widths')),
        ('beat.SeismicLinearGFConfig', drop_attribute('extension_lengths')),
        ('beat.SeismicLinearGFConfig', drop_attribute('patch_widths')),
        ('beat.SeismicLinearGFConfig', drop_attribute('patch_lengths')),
        ('beat.GeodeticConfig', drop_attribute('fit_plane')),
        ('beat.GeodeticConfig', drop_attribute('datadir')),
        ('beat.GeodeticConfig', drop_attribute('names')),
        ('beat.GeodeticConfig', drop_attribute('blacklist')),
        ('beat.GeodeticConfig', drop_attribute('types')),
        ('beat.EulerPoleConfig',
         rename_attribute('blacklist', 'station_blacklist'))
    ]

    def apply_rules(path, obj):
        for tagname, func in rules:
            if obj._tagname == tagname:
                func(path, obj)

    updates_avail = ['hierarchicals', 'hypers', 'structure']

    t1 = aguts.load(filename=fn)
    t2 = copy.deepcopy(t1)

    for upd in update:
        if upd not in updates_avail:
            raise TypeError('Update not available for "%s"' % upd)

    n_upd = len(update)
    if n_upd > 0:
        fn_tmp = fn + 'tmp'
        if 'structure' in update:
            aguts.apply_tree(t2, apply_rules)

        aguts.dump(t2, filename=fn_tmp, header=True)
        t2 = guts.load(filename=fn_tmp)
        if 'hypers' in update:
            t2.update_hypers()

        if 'hierarchicals' in update:
            t2.update_hierarchicals()

        t2.problem_config.validate_priors()
        guts.dump(t2, filename=fn_tmp)
        t2 = aguts.load(filename=fn_tmp)
    else:
        fn_tmp = fn

    s1 = aguts.dump(t1)
    s2 = aguts.dump(t2)

    if diff:
        result = list(
            difflib.unified_diff(s1.splitlines(1), s2.splitlines(1),
                                 'normalized old', 'normalized new'))

        if sys.stdout.isatty():
            sys.stdout.writelines(color_diff(result))
        else:
            sys.stdout.writelines(result)
    else:
        aguts.dump(t2, filename=fn_tmp, header=True)
        upd_config = guts.load(filename=fn_tmp)
        if 'hypers' in update:
            logger.info('Updated hyper parameters! Previous hyper'
                        ' parameter bounds are invalid now!')

        if 'hierarchicals' in update:
            logger.info('Updated hierarchicals.')

        guts.dump(upd_config, filename=fn)

    if n_upd > 0:
        os.remove(fn_tmp)
Example No. 21
    def dump_problem_info(self, dirname):
        fn = op.join(dirname, 'problem.yaml')
        util.ensuredirs(fn)
        guts.dump(self, filename=fn)
Example No. 22
def write_config(conf, config_name='config'):
    conf_path = expand(make_conf_path_tmpl(config_name))
    util.ensuredirs(conf_path)
    dump(conf, filename=conf_path)
Example No. 23
def main():
    parser = OptionParser(usage=usage, description=description)

    parser.add_option('--force',
                      dest='force',
                      action='store_true',
                      default=False,
                      help='allow recreation of output <directory>')

    parser.add_option('--debug',
                      dest='debug',
                      action='store_true',
                      default=False,
                      help='print debugging information to stderr')

    parser.add_option('--dry-run',
                      dest='dry_run',
                      action='store_true',
                      default=False,
                      help='show available stations/channels and exit '
                      '(do not download waveforms)')

    parser.add_option('--continue',
                      dest='continue_',
                      action='store_true',
                      default=False,
                      help='continue download after an accident')

    parser.add_option('--local-data',
                      dest='local_data',
                      action='append',
                      help='add file/directory with local data')

    parser.add_option('--local-stations',
                      dest='local_stations',
                      action='append',
                      help='add local stations file')

    parser.add_option('--selection',
                      dest='selection_file',
                      action='append',
                      help='add selection file')

    parser.add_option(
        '--local-responses-resp',
        dest='local_responses_resp',
        action='append',
        help='add file/directory with local responses in RESP format')

    parser.add_option('--local-responses-pz',
                      dest='local_responses_pz',
                      action='append',
                      help='add file/directory with local pole-zero responses')

    parser.add_option(
        '--local-responses-stationxml',
        dest='local_responses_stationxml',
        help='add file with local response information in StationXML format')

    parser.add_option(
        '--window',
        dest='window',
        default='full',
        help='set time window to choose [full, p, "<time-start>,<time-end>"'
        '] (time format is YYYY-MM-DD HH:MM:SS)')

    parser.add_option(
        '--out-components',
        choices=['enu', 'rtu'],
        dest='out_components',
        default='rtu',
        help='set output component orientations to radial-transverse-up [rtu] '
        '(default) or east-north-up [enu]')

    parser.add_option('--out-units',
                      choices=['M', 'M/S', 'M/S**2'],
                      dest='output_units',
                      default='M',
                      help='set output units to displacement "M" (default),'
                      ' velocity "M/S" or acceleration "M/S**2"')

    parser.add_option(
        '--padding-factor',
        type=float,
        default=3.0,
        dest='padding_factor',
        help='extend time window on either side, in multiples of 1/<fmin_hz> '
        '(default: %default)')

    parser.add_option(
        '--zero-padding',
        dest='zero_pad',
        action='store_true',
        default=False,
        help='Extend traces by zero-padding if clean restitution requires '
        'longer windows')

    parser.add_option(
        '--credentials',
        dest='user_credentials',
        action='append',
        default=[],
        metavar='SITE,USER,PASSWD',
        help='user credentials for specific site to access restricted data '
        '(this option can be repeated)')

    parser.add_option(
        '--token',
        dest='auth_tokens',
        metavar='SITE,FILENAME',
        action='append',
        default=[],
        help='user authentication token for specific site to access '
        'restricted data (this option can be repeated)')

    parser.add_option(
        '--sites',
        dest='sites',
        metavar='SITE1,SITE2,...',
        default='geofon,iris,orfeus',
        help='sites to query (available: %s, default: "%%default")' %
        ', '.join(g_sites_available))

    parser.add_option(
        '--band-codes',
        dest='priority_band_code',
        metavar='V,L,M,B,H,S,E,...',
        default='B,H',
        help='select and prioritize band codes (default: %default)')

    parser.add_option(
        '--instrument-codes',
        dest='priority_instrument_code',
        metavar='H,L,G,...',
        default='H,L',
        help='select and prioritize instrument codes (default: %default)')

    parser.add_option('--radius-min',
                      dest='radius_min',
                      metavar='VALUE',
                      default=0.0,
                      type=float,
                      help='minimum radius [km]')

    parser.add_option('--nstations-wanted',
                      dest='nstations_wanted',
                      metavar='N',
                      type=int,
                      help='number of stations to select initially')

    (options, args) = parser.parse_args(sys.argv[1:])

    print('Parsed arguments:', args)
    if len(args) not in (10, 7, 6):
        parser.print_help()
        sys.exit(1)

    if options.debug:
        util.setup_logging(program_name, 'debug')
    else:
        util.setup_logging(program_name, 'info')

    if options.local_responses_pz and options.local_responses_resp:
        logger.critical('cannot use local responses in PZ and RESP '
                        'format at the same time')
        sys.exit(1)

    n_resp_opt = 0
    for resp_opt in (options.local_responses_pz, options.local_responses_resp,
                     options.local_responses_stationxml):

        if resp_opt:
            n_resp_opt += 1

    if n_resp_opt > 1:
        logger.critical('can only handle local responses from either PZ or '
                        'RESP or StationXML. Cannot yet merge different '
                        'response formats.')
        sys.exit(1)

    if options.local_responses_resp and not options.local_stations:
        logger.critical('--local-responses-resp can only be used '
                        'when --stations is also given.')
        sys.exit(1)

    try:
        ename = ''
        magnitude = None
        mt = None
        if len(args) == 10:
            time = util.str_to_time(args[1] + ' ' + args[2])
            lat = float(args[3])
            lon = float(args[4])
            depth = float(args[5]) * km
            iarg = 6

        elif len(args) == 7:
            if args[2].find(':') == -1:
                sname_or_date = None
                lat = float(args[1])
                lon = float(args[2])
                event = None
                time = None
            else:
                sname_or_date = args[1] + ' ' + args[2]

            iarg = 3

        elif len(args) == 6:
            sname_or_date = args[1]
            iarg = 2

        if len(args) in (7, 6) and sname_or_date is not None:
            events = get_events_by_name_or_date([sname_or_date],
                                                catalog=geofon)
            if len(events) == 0:
                logger.critical('no event found')
                sys.exit(1)
            elif len(events) > 1:
                logger.critical('more than one event found')
                sys.exit(1)

            event = events[0]
            time = event.time
            lat = event.lat
            lon = event.lon
            depth = event.depth
            ename = event.name
            magnitude = event.magnitude
            mt = event.moment_tensor

        radius = float(args[iarg]) * km
        fmin = float(args[iarg + 1])
        sample_rate = float(args[iarg + 2])

        eventname = args[iarg + 3]
        cwd = str(sys.argv[1])
        event_dir = op.join(cwd, 'data', 'events', eventname)
        output_dir = op.join(event_dir, 'waveforms')
    except Exception:
        raise

    if options.force and op.isdir(event_dir):
        if not options.continue_:
            shutil.rmtree(event_dir)

    if op.exists(event_dir) and not options.continue_:
        logger.critical(
            'directory "%s" exists. Delete it first or use the --force option'
            % event_dir)
        sys.exit(1)

    util.ensuredir(output_dir)

    if time is not None:
        event = model.Event(time=time,
                            lat=lat,
                            lon=lon,
                            depth=depth,
                            name=ename,
                            magnitude=magnitude,
                            moment_tensor=mt)

    if options.window == 'full':
        if event is None:
            logger.critical('need event for --window=full')
            sys.exit(1)

        low_velocity = 1500.
        timewindow = VelocityWindow(low_velocity,
                                    tpad=options.padding_factor / fmin)

        tmin, tmax = timewindow(time, radius, depth)

    elif options.window == 'p':
        if event is None:
            logger.critical('need event for --window=p')
            sys.exit(1)

        phases = list(map(cake.PhaseDef, 'P p'.split()))
        emod = cake.load_model()

        tpad = options.padding_factor / fmin
        timewindow = PhaseWindow(emod, phases, -tpad, tpad)

        arrivaltimes = []
        for dist in num.linspace(0, radius, 20):
            try:
                arrivaltimes.extend(timewindow(time, dist, depth))
            except NoArrival:
                pass

        if not arrivaltimes:
            logger.error('required phase arrival not found')
            sys.exit(1)

        tmin = min(arrivaltimes)
        tmax = max(arrivaltimes)

    else:
        try:
            stmin, stmax = options.window.split(',')
            tmin = util.str_to_time(stmin.strip())
            tmax = util.str_to_time(stmax.strip())

            timewindow = FixedWindow(tmin, tmax)

        except ValueError:
            logger.critical('invalid argument to --window: "%s"' %
                            options.window)
            sys.exit(1)

    if event is not None:
        event.name = eventname

    tfade = tfade_factor / fmin

    tpad = tfade

    tmin -= tpad
    tmax += tpad

    tinc = None

    priority_band_code = options.priority_band_code.split(',')
    for s in priority_band_code:
        if len(s) != 1:
            logger.critical('invalid band code: %s' % s)

    priority_instrument_code = options.priority_instrument_code.split(',')
    for s in priority_instrument_code:
        if len(s) != 1:
            logger.critical('invalid instrument code: %s' % s)

    station_query_conf = dict(latitude=lat,
                              longitude=lon,
                              minradius=options.radius_min * km * cake.m2d,
                              maxradius=radius * cake.m2d,
                              channel=','.join('%s??' % s
                                               for s in priority_band_code))

    target_sample_rate = sample_rate

    fmax = target_sample_rate

    # target_sample_rate = None
    # priority_band_code = ['H', 'B', 'M', 'L', 'V', 'E', 'S']

    priority_units = ['M/S', 'M', 'M/S**2']

    # output_units = 'M'

    sites = [x.strip() for x in options.sites.split(',') if x.strip()]

    for site in sites:
        if site not in g_sites_available:
            logger.critical('unknown FDSN site: %s' % site)
            sys.exit(1)

    for s in options.user_credentials:
        try:
            site, user, passwd = s.split(',')
            g_user_credentials[site] = user, passwd
        except ValueError:
            logger.critical('invalid format for user credentials: "%s"' % s)
            sys.exit(1)

    for s in options.auth_tokens:
        try:
            site, token_filename = s.split(',')
            with open(token_filename, 'r') as f:
                g_auth_tokens[site] = f.read()
        except (ValueError, OSError, IOError):
            logger.critical('cannot get token from file: %s' % token_filename)
            sys.exit(1)

    fn_template0 = \
        'data_%(network)s.%(station)s.%(location)s.%(channel)s_%(tmin)s.mseed'

    fn_template_raw = op.join(output_dir, 'raw', fn_template0)
    fn_stations_raw = op.join(output_dir, 'stations.raw.txt')
    fn_template_rest = op.join(output_dir, 'rest', fn_template0)
    fn_commandline = op.join(output_dir, 'beatdown.command')

    ftap = (ffade_factors[0] * fmin, fmin, fmax, ffade_factors[1] * fmax)

    # chapter 1: download

    sxs = []
    for site in sites:
        try:
            extra_args = {
                'iris': dict(matchtimeseries=True),
            }.get(site, {})

            extra_args.update(station_query_conf)

            if site == 'geonet':
                extra_args.update(starttime=tmin, endtime=tmax)
            else:
                extra_args.update(startbefore=tmax,
                                  endafter=tmin,
                                  includerestricted=(site in g_user_credentials
                                                     or site in g_auth_tokens))

            logger.info('downloading channel information (%s)' % site)
            sx = fdsn.station(site=site,
                              format='text',
                              level='channel',
                              **extra_args)

        except fdsn.EmptyResult:
            logger.error('No stations matching given criteria. (%s)' % site)
            sx = None

        sxs.append(sx)  # may be None; keeps sxs parallel to sites

    if all(sx is None for sx in sxs) and not options.local_data:
        sys.exit(1)

    nsl_to_sites = defaultdict(list)
    nsl_to_station = {}

    if options.selection_file:
        logger.info('using stations from stations file!')
        stations = []
        for fn in options.selection_file:
            stations.extend(model.load_stations(fn))

        nsls_selected = set(s.nsl() for s in stations)
    else:
        nsls_selected = None

    for sx, site in zip(sxs, sites):
        if sx is None:
            continue

        site_stations = sx.get_pyrocko_stations()
        for s in site_stations:
            nsl = s.nsl()

            nsl_to_sites[nsl].append(site)
            if nsl not in nsl_to_station:
                if nsls_selected:
                    if nsl in nsls_selected:
                        nsl_to_station[nsl] = s
                else:
                    nsl_to_station[
                        nsl] = s  # using first site with this station

        logger.info('number of stations found: %i' % len(nsl_to_station))

    # station weeding
    if options.nstations_wanted:
        nsls_selected = None
        stations_all = [
            nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())
        ]

        for s in stations_all:
            s.set_event_relative_data(event)

        stations_selected = weeding.weed_stations(stations_all,
                                                  options.nstations_wanted)[0]

        nsls_selected = set(s.nsl() for s in stations_selected)
        logger.info('number of stations selected: %i' % len(nsls_selected))

    if tinc is None:
        tinc = 3600.

    have_data = set()

    if options.continue_:
        fns = glob.glob(fn_template_raw % starfill())
        p = pile.make_pile(fns)
    else:
        fns = []

    have_data_site = {}
    could_have_data_site = {}
    for site in sites:
        have_data_site[site] = set()
        could_have_data_site[site] = set()

    available_through = defaultdict(set)
    it = 0
    nt = int(math.ceil((tmax - tmin) / tinc))
    for it in range(nt):
        tmin_win = tmin + it * tinc
        tmax_win = min(tmin + (it + 1) * tinc, tmax)
        logger.info('time window %i/%i (%s - %s)' %
                    (it + 1, nt, util.tts(tmin_win), util.tts(tmax_win)))

        have_data_this_window = set()
        if options.continue_:
            trs_avail = p.all(tmin=tmin_win, tmax=tmax_win, load_data=False)
            for tr in trs_avail:
                have_data_this_window.add(tr.nslc_id)
        for site, sx in zip(sites, sxs):
            if sx is None:
                continue

            selection = []
            channels = sx.choose_channels(
                target_sample_rate=target_sample_rate,
                priority_band_code=priority_band_code,
                priority_units=priority_units,
                priority_instrument_code=priority_instrument_code,
                timespan=(tmin_win, tmax_win))

            for nslc in sorted(channels.keys()):
                if nsls_selected is not None and nslc[:3] not in nsls_selected:
                    continue

                could_have_data_site[site].add(nslc)

                if nslc not in have_data_this_window:
                    channel = channels[nslc]
                    if event:
                        lat_, lon_ = event.lat, event.lon
                    else:
                        lat_, lon_ = lat, lon
                    try:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude.value,
                            channel.longitude.value)
                    except AttributeError:
                        dist = orthodrome.distance_accurate50m_numpy(
                            lat_, lon_, channel.latitude, channel.longitude)

                    if event:
                        depth_ = event.depth
                        time_ = event.time
                    else:
                        depth_ = None
                        time_ = None

                    tmin_, tmax_ = timewindow(time_, dist, depth_)

                    tmin_this = tmin_ - tpad
                    tmax_this = float(tmax_ + tpad)

                    tmin_req = max(tmin_win, tmin_this)
                    tmax_req = min(tmax_win, tmax_this)
                    if channel.sample_rate:
                        try:
                            deltat = 1.0 / int(channel.sample_rate.value)
                        except AttributeError:
                            deltat = 1.0 / int(channel.sample_rate)
                    else:
                        deltat = 1.0

                    if tmin_req < tmax_req:
                        logger.debug('deltat %f' % deltat)
                        # extend time window by some samples because otherwise
                        # sometimes gaps are produced
                        # apparently the WS are only sensitive to full seconds
                        # round to avoid gaps, increase safety window
                        selection.append(nslc +
                                         (math.floor(tmin_req - deltat * 20.0),
                                          math.ceil(tmax_req + deltat * 20.0)))
            if options.dry_run:
                for (net, sta, loc, cha, _tmin, _tmax) in selection:
                    available_through[net, sta, loc, cha].add(site)

            else:
                neach = 100
                i = 0
                nbatches = ((len(selection) - 1) // neach) + 1
                while i < len(selection):
                    selection_now = selection[i:i + neach]
                    f = tempfile.NamedTemporaryFile()
                    try:
                        sbatch = ''
                        if nbatches > 1:
                            sbatch = ' (batch %i/%i)' % (
                                (i // neach) + 1, nbatches)

                        logger.info('downloading data (%s)%s' % (site, sbatch))
                        data = fdsn.dataselect(site=site,
                                               selection=selection_now,
                                               **get_user_credentials(site))

                        while True:
                            buf = data.read(1024)
                            if not buf:
                                break
                            f.write(buf)

                        f.flush()

                        trs = io.load(f.name)
                        for tr in trs:
                            tr.fix_deltat_rounding_errors()
                            logger.debug('cutting window: %f - %f' %
                                         (tmin_win, tmax_win))
                            logger.debug(
                                'available window: %f - %f, nsamples: %g' %
                                (tr.tmin, tr.tmax, tr.ydata.size))
                            try:
                                logger.debug('tmin before snap %f' % tr.tmin)
                                tr.snap(interpolate=True)
                                logger.debug('tmin after snap %f' % tr.tmin)
                                tr.chop(tmin_win,
                                        tmax_win,
                                        snap=(math.floor, math.ceil),
                                        include_last=True)
                                logger.debug(
                                    'cut window: %f - %f, nsamples: %g' %
                                    (tr.tmin, tr.tmax, tr.ydata.size))
                                have_data.add(tr.nslc_id)
                                have_data_site[site].add(tr.nslc_id)
                            except trace.NoData:
                                pass

                        fns2 = io.save(trs, fn_template_raw)
                        for fn in fns2:
                            if fn in fns:
                                logger.warn('overwriting file %s', fn)
                        fns.extend(fns2)

                    except fdsn.EmptyResult:
                        pass

                    except HTTPError:
                        logger.warn('an error occurred while downloading data '
                                    'for channels \n  %s' %
                                    '\n  '.join('.'.join(x[:4])
                                                for x in selection_now))

                    f.close()
                    i += neach

    if options.dry_run:
        nslcs = sorted(available_through.keys())

        all_channels = defaultdict(set)
        all_stations = defaultdict(set)

        def plural_s(x):
            return '' if x == 1 else 's'

        for nslc in nslcs:
            sites = tuple(sorted(available_through[nslc]))
            logger.info('selected: %s.%s.%s.%s from site%s %s' %
                        (nslc + (plural_s(len(sites)), '+'.join(sites))))

            all_channels[sites].add(nslc)
            all_stations[sites].add(nslc[:3])

        nchannels_all = 0
        nstations_all = 0
        for sites in sorted(all_channels.keys(),
                            key=lambda sites: (-len(sites), sites)):

            nchannels = len(all_channels[sites])
            nstations = len(all_stations[sites])
            nchannels_all += nchannels
            nstations_all += nstations
            logger.info('selected (%s): %i channel%s (%i station%s)' %
                        ('+'.join(sites), nchannels, plural_s(nchannels),
                         nstations, plural_s(nstations)))

        logger.info('selected total: %i channel%s (%i station%s)' %
                    (nchannels_all, plural_s(nchannels_all), nstations_all,
                     plural_s(nstations_all)))

        logger.info('dry run done.')
        sys.exit(0)

    for nslc in have_data:
        # if we are in continue mode, we have to guess where the data came from
        if not any(nslc in have_data_site[site] for site in sites):
            for site in sites:
                if nslc in could_have_data_site[site]:
                    have_data_site[site].add(nslc)

    sxs = {}
    for site in sites:
        selection = []
        for nslc in sorted(have_data_site[site]):
            selection.append(nslc + (tmin - tpad, tmax + tpad))

        if selection:
            logger.info('downloading response information (%s)' % site)
            sxs[site] = fdsn.station(site=site,
                                     level='response',
                                     selection=selection)

            sxs[site].dump_xml(filename=op.join(output_dir, 'stations.%s.xml' %
                                                site))

    # chapter 1.5: inject local data

    if options.local_data:
        have_data_site['local'] = set()
        plocal = pile.make_pile(options.local_data, fileformat='detect')
        logger.info(
            'Importing local data from %s between %s (%f) and %s (%f)' %
            (options.local_data, util.time_to_str(tmin), tmin,
             util.time_to_str(tmax), tmax))
        for traces in plocal.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                             tmin=tmin,
                                             tmax=tmax,
                                             tinc=tinc):

            for tr in traces:
                if tr.nslc_id not in have_data:
                    fns.extend(io.save(traces, fn_template_raw))
                    have_data_site['local'].add(tr.nslc_id)
                    have_data.add(tr.nslc_id)

        sites.append('local')

    if options.local_responses_pz:
        sxs['local'] = epz.make_stationxml(
            epz.iload(options.local_responses_pz))

    if options.local_responses_resp:
        local_stations = []
        for fn in options.local_stations:
            local_stations.extend(model.load_stations(fn))

        sxs['local'] = resp.make_stationxml(
            local_stations, resp.iload(options.local_responses_resp))

    if options.local_responses_stationxml:
        sxs['local'] = stationxml.load_xml(
            filename=options.local_responses_stationxml)

    # chapter 1.6: dump raw data stations file

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    stations = [nsl_to_station[nsl_] for nsl_ in sorted(nsl_to_station.keys())]

    util.ensuredirs(fn_stations_raw)
    model.dump_stations(stations, fn_stations_raw)

    dump_commandline(sys.argv, fn_commandline)

    # chapter 2: restitution

    if not fns:
        logger.error('no data available')
        sys.exit(1)

    p = pile.make_pile(fns, show_progress=False)
    otinc = 3600.
    otmin = math.floor(p.tmin / otinc) * otinc
    otmax = math.ceil(p.tmax / otinc) * otinc
    otpad = tpad * 2

    fns = []
    rest_traces_b = []
    win_b = None
    for traces_a in p.chopper_grouped(gather=lambda tr: tr.nslc_id,
                                      tmin=otmin,
                                      tmax=otmax,
                                      tinc=otinc,
                                      tpad=otpad):

        rest_traces_a = []
        win_a = None
        for tr in traces_a:
            win_a = tr.wmin, tr.wmax

            if win_b and win_b[0] >= win_a[0]:
                fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))
                rest_traces_b = []
                win_b = None

            response = None
            failure = []
            for site in sites:
                try:
                    if site not in sxs:
                        continue
                    logger.debug('Getting response for %s' % tr)
                    response = sxs[site].get_pyrocko_response(
                        tr.nslc_id,
                        timespan=(tr.tmin, tr.tmax),
                        fake_input_units=options.output_units)

                    break

                except stationxml.NoResponseInformation:
                    failure.append('%s: no response information' % site)

                except stationxml.MultipleResponseInformation:
                    failure.append('%s: multiple response information' % site)

            if response is None:
                failure = ', '.join(failure)

            else:
                failure = ''
                try:
                    if tr.tmin > tmin and options.zero_pad:
                        logger.warning(
                            'Trace too short for clean restitution in '
                            'desired frequency band -> zero-padding!')
                        tr.extend(tr.tmin - tfade, tr.tmax + tfade, 'repeat')

                    rest_tr = tr.transfer(tfade, ftap, response, invert=True)
                    rest_traces_a.append(rest_tr)

                except (trace.TraceTooShort, trace.NoData):
                    failure = 'trace too short'

            if failure:
                logger.warn('failed to restitute trace %s.%s.%s.%s (%s)' %
                            (tr.nslc_id + (failure, )))

        if rest_traces_b:
            rest_traces = trace.degapper(rest_traces_b + rest_traces_a,
                                         deoverlap='crossfade_cos')

            fns.extend(cut_n_dump(rest_traces, win_b, fn_template_rest))
            rest_traces_a = []
            if win_a:
                for tr in rest_traces:
                    try:
                        rest_traces_a.append(
                            tr.chop(win_a[0], win_a[1] + otpad, inplace=False))
                    except trace.NoData:
                        pass

        rest_traces_b = rest_traces_a
        win_b = win_a

    fns.extend(cut_n_dump(rest_traces_b, win_b, fn_template_rest))

    # chapter 3: rotated restituted traces for inspection

    if not event:
        sys.exit(0)

    fn_template1 = \
        'DISPL.%(network)s.%(station)s.%(location)s.%(channel)s'

    fn_waveforms = op.join(output_dir, 'prepared', fn_template1)
    fn_stations = op.join(output_dir, 'stations.prepared.txt')
    fn_event = op.join(event_dir, 'event.txt')
    fn_event_yaml = op.join(event_dir, 'event.yaml')

    nsl_to_station = {}
    for site in sites:
        if site in sxs:
            stations = sxs[site].get_pyrocko_stations(timespan=(tmin, tmax))
            for s in stations:
                nsl = s.nsl()
                if nsl not in nsl_to_station:
                    nsl_to_station[nsl] = s

    p = pile.make_pile(fns, show_progress=False)

    deltat = None
    if sample_rate is not None:
        deltat = 1.0 / sample_rate

    traces_beat = []
    used_stations = []
    for nsl, s in nsl_to_station.items():
        s.set_event_relative_data(event)
        traces = p.all(trace_selector=lambda tr: tr.nslc_id[:3] == nsl)

        if options.out_components == 'rtu':
            pios = s.guess_projections_to_rtu(out_channels=('R', 'T', 'Z'))
        elif options.out_components == 'enu':
            pios = s.guess_projections_to_enu(out_channels=('E', 'N', 'Z'))
        else:
            assert False

        for (proj, in_channels, out_channels) in pios:

            proc = trace.project(traces, proj, in_channels, out_channels)
            for tr in proc:
                tr_beat = heart.SeismicDataset.from_pyrocko_trace(tr)
                traces_beat.append(tr_beat)
                for ch in out_channels:
                    if ch.name == tr.channel:
                        s.add_channel(ch)

            if proc:
                io.save(proc, fn_waveforms)
                used_stations.append(s)

    stations = list(used_stations)
    util.ensuredirs(fn_stations)
    model.dump_stations(stations, fn_stations)
    model.dump_events([event], fn_event)

    from pyrocko.guts import dump
    dump([event], filename=fn_event_yaml)

    utility.dump_objects(op.join(cwd, 'seismic_data.pkl'),
                         outlist=[stations, traces_beat])
    logger.info('prepared waveforms from %i stations' % len(stations))
Example No. 24
def upgrade_config_file(fn, diff=True, update=[]):
    rules = [
        ('beat.SeismicConfig', drop_attribute('blacklist')),
        ('beat.SeismicConfig', drop_attribute('calc_data_cov')),
        ('beat.SeismicConfig',
         set_attribute(
             'noise_estimator',
             aguts.load(string='''!beat.SeismicNoiseAnalyserConfig
                      structure: identity
                      pre_arrival_time: 5
                    '''))),
        ('beat.WaveformFitConfig', set_attribute('blacklist', [])),
        ('beat.WaveformFitConfig', set_attribute('preprocess_data', True)),
        ('beat.MetropolisConfig', drop_attribute('n_stages')),
        ('beat.MetropolisConfig', drop_attribute('stage')),
        ('beat.ParallelTemperingConfig', set_attribute('resample', False)),
    ]

    def apply_rules(path, obj):
        for tagname, func in rules:
            if obj._tagname == tagname:
                func(path, obj)

    updates_avail = ['hierarchicals', 'hypers', 'structure']

    t1 = aguts.load(filename=fn)
    t2 = copy.deepcopy(t1)

    for upd in update:
        if upd not in updates_avail:
            raise TypeError('Update not available for "%s"' % upd)

    n_upd = len(update)
    if n_upd > 0:
        fn_tmp = fn + 'tmp'
        if 'structure' in update:
            aguts.apply_tree(t2, apply_rules)

        aguts.dump(t2, filename=fn_tmp, header=True)
        t2 = guts.load(filename=fn_tmp)
        if 'hypers' in update:
            t2.update_hypers()

        if 'hierarchicals' in update:
            t2.update_hierarchicals()

        guts.dump(t2, filename=fn_tmp)
        t2 = aguts.load(filename=fn_tmp)
    else:
        fn_tmp = fn

    s1 = aguts.dump(t1)
    s2 = aguts.dump(t2)

    if diff:
        result = list(
            difflib.unified_diff(s1.splitlines(1), s2.splitlines(1),
                                 'normalized old', 'normalized new'))

        if sys.stdout.isatty():
            sys.stdout.writelines(color_diff(result))
        else:
            sys.stdout.writelines(result)
    else:
        aguts.dump(t2, filename=fn_tmp, header=True)
        upd_config = guts.load(filename=fn_tmp)
        if 'hypers' in update:
            logger.info('Updated hyper parameters! Previous hyper'
                        ' parameter bounds are invalid now!')

        if 'hierarchicals' in update:
            logger.info('Updated hierarchicals.')

        guts.dump(upd_config, filename=fn)

    if n_upd > 0:
        os.remove(fn_tmp)
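The drop_attribute and set_attribute helpers used in the rules table are not
shown in this snippet; they are typically defined along these lines (a sketch
following the pattern of grond's upgrade module, assuming the aguts objects
support dict-style attribute access):

def drop_attribute(name):
    # remove the attribute from matching objects, if present
    def func(path, obj):
        if name in obj:
            obj.drop_attribute(name)
    return func

def set_attribute(name, value):
    # set or overwrite the attribute on matching objects
    def func(path, obj):
        obj[name] = value
    return func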
Ejemplo n.º 26
0
def report(env,
           report_config=None,
           update_without_plotting=True,
           make_index=True,
           make_archive=True,
           nthreads=0):

    if report_config is None:
        report_config = ReportConfig()
        report_config.set_basepath('.')

    event_name = env.get_current_event_name()
    logger.info('Creating report entry for run "%s"...' % event_name)

    fp = report_config.expand_path
    entry_path = expand_template(
        op.join(fp(report_config.report_base_path),
                report_config.entries_sub_path),
        dict(event_name=event_name, problem_name=event_name))

    if op.exists(entry_path) and not update_without_plotting:
        shutil.rmtree(entry_path)

    util.ensuredir(entry_path)
    plots_dir_out = op.join(entry_path, 'plots')
    util.ensuredir(plots_dir_out)
    configs_dir = op.join(op.split(__file__)[0], 'app/configs/')
    rundir_path = env.get_rundir_path()
    try:
        os.system("cp -r %s/grun/plots/* %s" % (rundir_path, plots_dir_out))
    except OSError:
        pass
    os.system("cp %s/plot_collection.yaml %s" % (configs_dir, plots_dir_out))

    util.ensuredir("%s/shakemap/default/" % (plots_dir_out))
    os.system(
        "cp %s/*shakemap.png %s/shakemap/default/shakemap.default.gf_shakemap.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/shakemap.default.plot_group.yaml %s/shakemap/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/location/default/" % (plots_dir_out))
    os.system(
        "cp %s/*location.png %s/location/default/location.default.location.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/location.default.plot_group.yaml %s/location/default/" %
              (configs_dir, plots_dir_out))

    util.ensuredir("%s/production_data/default/" % (plots_dir_out))
    os.system(
        "cp %s/*production_data.png %s/production_data/default/production_data.default.production_data.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/production_data.default.plot_group.yaml %s/production_data/default/"
        % (configs_dir, plots_dir_out))

    util.ensuredir("%s/waveforms/default/" % (plots_dir_out))
    os.system(
        "cp %s/waveforms_1.png %s/waveforms/default/waveforms.default.waveforms_1.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_2.png %s/waveforms/default/waveforms.default.waveforms_2.d100.png"
        % (rundir_path, plots_dir_out))
    os.system(
        "cp %s/waveforms_3.png %s/waveforms/default/waveforms.default.waveforms_3.d100.png"
        % (rundir_path, plots_dir_out))
    os.system("cp %s/waveforms.default.plot_group.yaml %s/waveforms/default/" %
              (configs_dir, plots_dir_out))

    os.system("cp %s/grun/config.yaml %s/config.yaml" %
              (rundir_path, entry_path))

    try:

        events = model.load_events(op.join(rundir_path, "event.txt"))
        model.dump_events(events,
                          filename=op.join(entry_path, 'event_reference.yaml'),
                          format="yaml")
        event = events[0]
        from silvertine import plot
        pcc = report_config.plot_config_collection.get_weeded(env)
        plot.make_plots(env,
                        plots_path=op.join(entry_path, 'plots'),
                        plot_config_collection=pcc)

        try:
            run_info = env.get_run_info()
        except environment.NoRundirAvailable:
            run_info = None

        rie = ReportIndexEntry(path='.',
                               problem_name=event_name,
                               silvertine_version="0.01",
                               run_info=run_info)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_best = guts.load(filename=fn)

        fn = op.join(entry_path, 'event_reference.yaml')
        if op.exists(fn):
            rie.event_reference = guts.load(filename=fn)

        fn = op.join(entry_path, 'index.yaml')
        guts.dump(rie, filename=fn)

        logger.info('Done creating report entry for run "%s".' % event_name)

    #    report_index(report_config)

    #    if make_archive:
    #        report_archive(report_config)
    except FileNotFoundError:
        pass
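The os.system('cp ...') calls above are Unix-only and ignore failures
silently; a portable alternative is glob plus shutil (a sketch, not part of
the original snippet; note it copies under the original file names rather
than renaming as the shell commands above do):

import glob
import shutil

def copy_matching(pattern, dest_dir):
    # copy every file matching the glob pattern into dest_dir
    matches = glob.glob(pattern)
    if not matches:
        logger.debug('no files match %s' % pattern)
    for fn in matches:
        shutil.copy(fn, dest_dir)

# e.g.:
# copy_matching('%s/*shakemap.png' % rundir_path,
#               op.join(plots_dir_out, 'shakemap', 'default'))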
Ejemplo n.º 27
0
def write_config(config, path):
    basepath = config.get_basepath()
    dirname = op.dirname(path) or '.'
    config.change_basepath(dirname)
    dump(config, filename=path)
    config.change_basepath(basepath)
Ejemplo n.º 28
0
def dump_misfit_result_collection(misfit_result_collection, path):
    dump(misfit_result_collection, filename=path)
Ejemplo n.º 29
0
def process_event(ievent, g_data_id):

    environment, force, preserve, status, nparallel = \
        g_state[g_data_id]

    config = environment.get_config()
    event_name = environment.get_selected_event_names()[ievent]
    nevents = environment.nevents_selected
    tstart = time.time()

    ds = config.get_dataset(event_name)
    event = ds.get_event()
    problem = config.get_problem(event)

    synt = ds.synthetic_test
    if synt:
        problem.base_source = problem.get_source(synt.get_x())

    check_problem(problem)

    rundir = expand_template(
        config.rundir_template,
        dict(problem_name=problem.name))
    environment.set_rundir_path(rundir)

    if op.exists(rundir):
        if preserve:
            nold_rundirs = len(glob.glob(rundir + '*'))
            shutil.move(rundir, rundir+'-old-%d' % (nold_rundirs))
        elif force:
            shutil.rmtree(rundir)
        else:
            logger.warning(
                'Skipping problem "%s": rundir already exists: %s' %
                (problem.name, rundir))
            return

    util.ensuredir(rundir)

    logger.info(
        'Starting event %i / %i' % (ievent+1, nevents))

    logger.info('Rundir: %s' % rundir)

    logger.info('Analysing problem "%s".' % problem.name)

    for analyser_conf in config.analyser_configs:
        analyser = analyser_conf.get_analyser()
        analyser.analyse(problem, ds)

    basepath = config.get_basepath()
    config.change_basepath(rundir)
    guts.dump(config, filename=op.join(rundir, 'config.yaml'))
    config.change_basepath(basepath)

    optimiser = config.optimiser_config.get_optimiser()
    optimiser.init_bootstraps(problem)
    problem.dump_problem_info(rundir)

    monitor = None
    if status == 'state':
        monitor = GrondMonitor.watch(rundir)

    xs_inject = None
    synt = ds.synthetic_test
    if synt and synt.inject_solution:
        xs_inject = synt.get_x()[num.newaxis, :]

    try:
        if xs_inject is not None:
            from .optimisers import highscore
            if not isinstance(optimiser, highscore.HighScoreOptimiser):
                raise GrondError(
                    'Optimiser does not support injections.')

            optimiser.sampler_phases[0:0] = [
                highscore.InjectionSamplerPhase(xs_inject=xs_inject)]

        optimiser.optimise(
            problem,
            rundir=rundir)

        harvest(rundir, problem, force=True)

    except BadProblem as e:
        logger.error(str(e))

    except GrondError as e:
        logger.error(str(e))

    finally:
        if monitor:
            monitor.terminate()

    tstop = time.time()
    logger.info(
        'Stop %i / %i (%g min)' % (ievent+1, nevents, (tstop - tstart)/60.))

    logger.info(
        'Done with problem "%s", rundir is "%s".' % (problem.name, rundir))
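For context, g_state is a module-level registry used to hand shared,
read-only state to worker processes; the caller fills it before mapping
process_event over the selected event indices. A sketch of the assumed
calling pattern (following grond's core module; names are illustrative):

from pyrocko import parimap

g_state = {}

def go(environment, force=False, preserve=False, status='state', nparallel=1):
    g_data = (environment, force, preserve, status, nparallel)
    g_data_id = id(g_data)
    g_state[g_data_id] = g_data
    nevents = environment.nevents_selected
    for _ in parimap.parimap(
            process_event, range(nevents), [g_data_id] * nevents,
            nprocs=nparallel):
        pass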
Ejemplo n.º 30
0
def write_config(conf):
    conf_path = expand(conf_path_tmpl)
    util.ensuredirs(conf_path)
    dump(conf, filename=conf_path)
Ejemplo n.º 31
0
    def dump_collection(self):
        path = self.path_collection()
        util.ensuredirs(path)
        guts.dump(self._collection, filename=path)
Ejemplo n.º 32
0
def report(env, report_config=None, update_without_plotting=False):
    if report_config is None:
        report_config = ReportConfig()
        report_config.set_basepath('.')

    event_name = env.get_current_event_name()
    problem = env.get_problem()
    logger.info('Creating report for event %s...' % event_name)

    fp = report_config.expand_path
    report_path = expand_template(
        op.join(fp(report_config.reports_base_path),
                report_config.report_sub_path),
        dict(event_name=event_name, problem_name=problem.name))

    if op.exists(report_path) and not update_without_plotting:
        shutil.rmtree(report_path)

    try:
        problem.dump_problem_info(report_path)

        guts.dump(env.get_config(),
                  filename=op.join(report_path, 'config.yaml'),
                  header=True)

        util.ensuredir(report_path)
        plots_dir_out = op.join(report_path, 'plots')
        util.ensuredir(plots_dir_out)

        event = env.get_dataset().get_event()
        guts.dump(event, filename=op.join(report_path, 'event.reference.yaml'))

        try:
            rundir_path = env.get_rundir_path()

            core.export('stats', [rundir_path],
                        filename=op.join(report_path, 'stats.yaml'))

            core.export('best', [rundir_path],
                        filename=op.join(report_path,
                                         'event.solution.best.yaml'),
                        type='event-yaml')

            core.export('mean', [rundir_path],
                        filename=op.join(report_path,
                                         'event.solution.mean.yaml'),
                        type='event-yaml')

            core.export('ensemble', [rundir_path],
                        filename=op.join(report_path,
                                         'event.solution.ensemble.yaml'),
                        type='event-yaml')

        except (environment.NoRundirAvailable, ProblemInfoNotAvailable,
                ProblemDataNotAvailable):

            pass

        if not update_without_plotting:
            from grond import plot
            plot.make_plots(env, plots_path=op.join(report_path, 'plots'))

        rie = ReportIndexEntry(path='.',
                               problem_name=problem.name,
                               grond_version=problem.grond_version)

        fn = op.join(report_path, 'event.solution.best.yaml')
        if op.exists(fn):
            rie.event_best = guts.load(filename=fn)

        fn = op.join(report_path, 'event.reference.yaml')
        if op.exists(fn):
            rie.event_reference = guts.load(filename=fn)

        fn = op.join(report_path, 'index.yaml')
        guts.dump(rie, filename=fn)

    except Exception as e:
        logger.warning(
            'report generation failed, removing incomplete report dir: %s' %
            report_path)

        if op.exists(report_path):
            shutil.rmtree(report_path)

        raise e

    report_index(report_config)
    report_archive(report_config)
Ejemplo n.º 33
0
    def testOptionalDefault(self):

        from pyrocko.guts_array import Array, array_equal
        import numpy as num
        assert_ae = num.testing.assert_almost_equal

        def array_equal_noneaware(a, b):
            if a is None:
                return b is None
            elif b is None:
                return a is None
            else:
                return array_equal(a, b)

        data = [
            ('a', Int.T(),
                [None, 0, 1, 2],
                ['aerr', 0, 1, 2]),
            ('b', Int.T(optional=True),
                [None, 0, 1, 2],
                [None, 0, 1, 2]),
            ('c', Int.T(default=1),
                [None, 0, 1, 2],
                [1, 0, 1, 2]),
            ('d', Int.T(default=1, optional=True),
                [None, 0, 1, 2],
                [1, 0, 1, 2]),
            ('e', List.T(Int.T()),
                [None, [], [1], [2]],
                [[], [], [1], [2]]),
            ('f', List.T(Int.T(), optional=True),
                [None, [], [1], [2]],
                [None, [], [1], [2]]),
            ('g', List.T(Int.T(), default=[1]), [
                None, [], [1], [2]],
                [[1], [], [1], [2]]),
            ('h', List.T(Int.T(), default=[1], optional=True),
                [None, [], [1], [2]],
                [[1], [], [1], [2]]),
            ('i', Tuple.T(2, Int.T()),
                [None, (1, 2)],
                ['err', (1, 2)]),
            ('j', Tuple.T(2, Int.T(), optional=True),
                [None, (1, 2)],
                [None, (1, 2)]),
            ('k', Tuple.T(2, Int.T(), default=(1, 2)),
                [None, (1, 2), (3, 4)],
                [(1, 2), (1, 2), (3, 4)]),
            ('l', Tuple.T(2, Int.T(), default=(1, 2), optional=True),
                [None, (1, 2), (3, 4)],
                [(1, 2), (1, 2), (3, 4)]),
            ('i2', Tuple.T(None, Int.T()),
                [None, (1, 2)],
                [(), (1, 2)]),
            ('j2', Tuple.T(None, Int.T(), optional=True),
                [None, (), (3, 4)],
                [None, (), (3, 4)]),
            ('k2', Tuple.T(None, Int.T(), default=(1,)),
                [None, (), (3, 4)],
                [(1,), (), (3, 4)]),
            ('l2', Tuple.T(None, Int.T(), default=(1,), optional=True),
                [None, (), (3, 4)],
                [(1,), (), (3, 4)]),
            ('m', Array.T(shape=(None,), dtype=int, serialize_as='list'),
                [num.arange(0), num.arange(2)],
                [num.arange(0), num.arange(2)]),
            ('n', Array.T(shape=(None,), dtype=int, serialize_as='list',
                          optional=True),
                [None, num.arange(0), num.arange(2)],
                [None, num.arange(0), num.arange(2)]),
            ('o', Array.T(shape=(None,), dtype=int, serialize_as='list',
                          default=num.arange(2)),
                [None, num.arange(0), num.arange(2), num.arange(3)],
                [num.arange(2), num.arange(0), num.arange(2), num.arange(3)]),
            ('p', Array.T(shape=(None,), dtype=int, serialize_as='list',
                          default=num.arange(2), optional=True),
                [None, num.arange(0), num.arange(2), num.arange(3)],
                [num.arange(2), num.arange(0), num.arange(2), num.arange(3)]),
            ('q', Dict.T(String.T(), Int.T()),
                [None, {}, {'a': 1}],
                [{}, {}, {'a': 1}]),
            ('r', Dict.T(String.T(), Int.T(), optional=True),
                [None, {}, {'a': 1}],
                [None, {}, {'a': 1}]),
            ('s', Dict.T(String.T(), Int.T(), default={'a': 1}),
                [None, {}, {'a': 1}],
                [{'a': 1}, {}, {'a': 1}]),
            ('t', Dict.T(String.T(), Int.T(), default={'a': 1}, optional=True),
                [None, {}, {'a': 1}],
                [{'a': 1}, {}, {'a': 1}]),
        ]

        for k, t, vals, exp in data:
            last = [None]

            class A(Object):
                def __init__(self, **kwargs):
                    last[0] = len(kwargs)
                    Object.__init__(self, **kwargs)

                v = t

            A.T.class_signature()

            for v, e in zip(vals, exp):
                if isinstance(e, str) and e == 'aerr':
                    with self.assertRaises(ArgumentError):
                        if v is not None:
                            a1 = A(v=v)
                        else:
                            a1 = A()

                    continue
                else:
                    if v is not None:
                        a1 = A(v=v)
                    else:
                        a1 = A()

                if isinstance(e, str) and e == 'err':
                    with self.assertRaises(ValidationError):
                        a1.validate()
                else:
                    a1.validate()
                    a2 = load_string(dump(a1))
                    if isinstance(e, num.ndarray):
                        assert last[0] == int(
                            not (array_equal_noneaware(t.default(), a1.v)
                                 and t.optional))
                        assert_ae(a1.v, e)
                        assert_ae(a2.v, e)
                    else:
                        assert last[0] == int(
                            not (t.default() == a1.v and t.optional))
                        self.assertEqual(a1.v, e)
                        self.assertEqual(a2.v, e)
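The behaviour this table encodes can be seen directly on a small guts Object
(a minimal, self-contained sketch; the class is illustrative):

from pyrocko.guts import Object, Int, List

class Demo(Object):
    a = Int.T(default=1)
    b = Int.T(optional=True)
    c = List.T(Int.T())

d = Demo()
assert d.a == 1      # unset attribute with a default receives the default
assert d.b is None   # unset optional attribute stays None
assert d.c == []     # unset List attribute defaults to an empty list
d.validate()         # passes: nothing required is missing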
Ejemplo n.º 35
0
    def export(self,
               point,
               results_path,
               stage_number,
               fix_output=False,
               force=False,
               update=False):

        from pyrocko.guts import dump
        from kite.scene import Scene, UserIOWarning
        from beat.plotting import map_displacement_grid

        gc = self.config

        results = self.assemble_results(point)

        def get_filename(attr, ending='csv'):
            return os.path.join(
                results_path, '{}_{}_{}.{}'.format(
                    os.path.splitext(dataset.name)[0], attr, stage_number,
                    ending))

    # export per dataset type
    for typ, config in gc.types.items():
        if typ == 'GNSS':
                from pyrocko.model import gnss

                logger.info('Exporting GNSS data ...')
                campaigns = config.load_data(campaign=True)

                for campaign in campaigns:
                    model_camp = gnss.GNSSCampaign(
                        stations=copy.deepcopy(campaign.stations),
                        name='%s_model' % campaign.name)

                    dataset_to_result = {}
                    for dataset, result in zip(self.datasets, results):
                        if dataset.typ == 'GNSS':
                            dataset_to_result[dataset] = result

                    for dataset, result in dataset_to_result.items():
                        for ista, sta in enumerate(model_camp.stations):
                            comp = getattr(sta, dataset.component)
                            comp.shift = result.processed_syn[ista]
                            comp.sigma = 0.

                    outname = os.path.join(
                        results_path, 'gnss_synths_%i.yaml' % stage_number)

                    dump(model_camp, filename=outname)

            elif typ == 'SAR':
                logger.info('Exporting SAR data ...')
                for dataset, result in zip(self.datasets, results):
                    if dataset.typ == 'SAR':
                        try:
                            scene_path = os.path.join(config.datadir,
                                                      dataset.name)
                            logger.info(
                                'Loading full resolution kite scene: %s' %
                                scene_path)
                            scene = Scene.load(scene_path)
                        except UserIOWarning:
                            logger.warning('Full resolution data could not be'
                                           ' loaded! Skipping ...')
                            continue

                        for attr in [
                                'processed_obs', 'processed_syn',
                                'processed_res'
                        ]:

                            filename = get_filename(attr, ending='csv')
                            displacements = getattr(result, attr)
                            dataset.export_to_csv(filename, displacements)
                            logger.info('Stored CSV file to: %s' % filename)

                            filename = get_filename(attr, ending='yml')
                            vals = map_displacement_grid(displacements, scene)
                            scene.displacement = vals
                            scene.save(filename)
                            logger.info('Stored kite scene to: %s' % filename)
Ejemplo n.º 36
0
def from_palantiri():
    km = 1000.
    try:
        path = sys.argv[3]
        evpath = path
    except IndexError:
        path = None
        evpath = 'events/' + str(sys.argv[1])

    C = config.Config(evpath)
    Origin = C.parseConfig('origin')
    Config = C.parseConfig('config')
    cfg = ConfigObj(dict=Config)
    step = cfg.UInt('step')
    step2 = cfg.UInt('step_f2')
    duration = cfg.UInt('duration')
    forerun = cfg.UInt('forerun')
    deltat = step
    deltat2 = step2
    rel = 'events/' + str(sys.argv[1]) + '/work/semblance/'

    dimx = int(Config['dimx'])
    dimy = int(Config['dimy'])

    origin = OriginCfg(Origin)
    depth = origin.depth() * 1000.
    ev = event.Event(lat=origin.lat(),
                     lon=origin.lon(),
                     depth=depth,
                     time=util.str_to_time(origin.time()))
    data, data_int, data_boot, data_int_boot, path_in_str, maxs, datamax, n_files = load(
        0, path=path)
    values_orig = data[:, 2]
    values_orig = num.append(values_orig, num.array([0., 0.]))

    lat_orig = data[:, 1]
    lon_orig = data[:, 0]

    ncorners = 4
    lon_grid_orig = num.linspace(num.min(lat_orig), num.max(lat_orig), dimy)
    lat_grid_orig = num.linspace(num.min(lon_orig), num.max(lon_orig), dimx)

    if path is None:
        ntimes = int((forerun + duration) / step)
    else:
        ntimes = n_files

    verts = []
    lon_diff = (lon_orig[dimy + 1] - lon_orig[0]) / 4.
    lat_diff = (lat_orig[1] - lat_orig[0]) / 4.

    dist = orthodrome.distance_accurate50m(lat_grid_orig[1], lon_grid_orig[1],
                                           lat_grid_orig[0], lon_grid_orig[0])

    for x, y in zip(lon_orig, lat_orig):

        xyz = ([dist / 2., dist / 2.,
                depth], [-dist / 2., dist / 2.,
                         depth], [-dist / 2., -dist / 2.,
                                  depth], [dist / 2., -dist / 2., depth])
        latlon = ([x, y], [x, y], [x, y], [x, y])
        patchverts = num.hstack((latlon, xyz))
        verts.append(patchverts)

    vertices = num.vstack(verts)

    # each input point contributes one patch of `ncorners` vertex rows
    npatches = len(vertices) // ncorners
    faces1 = num.arange(ncorners * npatches,
                        dtype='int64').reshape(npatches, ncorners)
    faces2 = num.fliplr(faces1)
    faces = num.vstack((faces2, faces1))
    srf_semblance_list = []
    for i in range(0, ntimes):
        if len(sys.argv) < 4:
            print("missing input arrayname")
        else:
            data, data_int, data_boot, data_int_boot, path_in_str, maxsb, datamaxb, n_files = load(
                0, step=i, path=path)
            srf_semblance = data[:, 2]
            srf_semblance = num.append(srf_semblance, num.array([0., 0.]))
            srf_semblance = duplicate_property(srf_semblance)
            srf_semblance_list.append(srf_semblance)

    srf_semblance = num.asarray(srf_semblance_list).T
    srf_times = num.linspace(0, forerun + duration, ntimes)
    geom = Geometry(times=srf_times, event=ev)
    geom.setup(vertices, faces)
    sub_headers = tuple([str(i) for i in srf_times])
    geom.add_property(('semblance', 'float64', sub_headers), srf_semblance)
    dump(geom, filename='geom.yaml')
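The dumped geometry can be reloaded with guts for later inspection or
visualisation (a short sketch; it assumes the Geometry class is importable in
the loading context so that the YAML tag resolves):

from pyrocko.guts import load

geom = load(filename='geom.yaml')
print(geom.times)  # time axis the semblance property was attached to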