Example #1
def pick_catalog():
    """Process observed traces in catalog."""
    ws = Workspace()
    ws.rm('picked')

    obs = getpath('obs')
    syn = getpath('syn')

    if obs and syn:
        obsdir = ws.subdir(obs)
        syndir = ws.subdir(syn)

        for trace in obsdir.ls():
            event = trace.split('.')[0]

            ws.mkdir(f'picked/{event}')

            ws.add(
                asdf_task((obsdir.abs(trace),
                           syndir.abs(syndir.ls(grep=f'{event}.*')[0])),
                          None,
                          partial(pick, event),
                          input_type='stream',
                          accessor=True,
                          name=event))

    return ws
Example #2
    def bracket(self, ws: Workspace):
        # update misfit value
        step = len(self.misfits)
        self.misfits.append(read_misfit(ws.abs()))
        d.write(f'step {step}: {self.steps[-1]:.4e} {self.misfits[-1]:.4e}\n', 'misfit.log', 'a')

        x, f = self.get_history()
        alpha = None

        if self.check_bracket(x, f):
            if self.good_enough(x, f):
                step = x[f.argmin()]

                for j, s in enumerate(self.steps):
                    if np.isclose(step, s):
                        d.write(f'new model: step {j}\n\n', 'misfit.log', 'a')
                        self.ln(f'step_{j-1:02d}/model_gll.bp', '../model_new.bp')
                        self.parent['search_step'] = s
                        return
                
            # bracket found: estimate the next trial step from a polynomial fit of the misfits
            alpha = self.polyfit(x, f)
            
        elif len(self.steps) - 1 < self.nsteps:
            if all(f <= f[0]):
                # misfits still at or below the starting value: extend the search by the golden ratio
                alpha = 1.618034 * x[-1]

            else:
                # some trial step overshot the starting misfit: shrink the trial step instead
                alpha = x[1] / 1.618034
        
        if alpha is not None:
            self.steps.append(alpha)
            self.add_step()
        
        else:
            raise RuntimeError('line search failed', self.steps, self.misfits)
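
The check_bracket, good_enough and polyfit helpers are not shown here. As a rough standalone illustration only (not necessarily how this class implements them), a bracket is commonly declared once the smallest misfit is interior to the trial steps, and the next trial step is taken from the vertex of a parabola fitted to the (step, misfit) samples:

import numpy as np

def check_bracket(x, f):
    # a minimum is bracketed when the best misfit is not at either end of the trial steps
    i = int(f.argmin())
    return 0 < i < len(f) - 1

def polyfit(x, f):
    # fit a parabola to the (step, misfit) samples and return its vertex,
    # i.e. the step length that minimizes the quadratic model
    a, b, _ = np.polyfit(x, f, 2)
    return -b / (2 * a)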
Example #3
    def add_step(self):
        step = len(self.misfits) - 1

        self.add(ws := Workspace(f'step_{step:02d}'))
        ws.add(ws.mkdir)

        # update model
        model = self.abs('../model_init.bp')

        if step == 0:
            mesh = self.abs('../kernel/solver_synthetic/DATABASES_MPI/solver_data.bp')
        
        else:
            mesh = self.abs(f'step_{step-1:02d}/kernel_misfit/solver_synthetic/DATABASES_MPI/solver_data.bp')
        
        # arguments to xupdate_model: step length, current model, mesh database, search direction, output directory
        cmd = f'{self.abs("../../adios/xupdate_model")} {self.steps[-1]} {model} {mesh} {self.abs("../direction.bp")} .'
        ws.add(partial(ws.mpiexec, cmd, nprocs=getsize()))
        
        # compute misfit
        ws.add(kernel := create_kernel('kernel_misfit', {
            'misfit_only': True, 'path_model': ws.abs('model_gll.bp'), 'path_encoded': self.abs('../kernel/observed.ft.h5')
        }))

        # compute next step
        ws.add(partial(self.bracket, kernel))
Example #4
    def setup(self):
        # link adios binaries
        self.ln(getpath('adios', 'bin'), 'adios')

        for i in range(self.niters):
            # workspace for the i-th iteration
            self.add(ws := Workspace(f'iter_{i:02d}', {'iteration': i}))

            # link initial model
            path_model = getpath('model_init') if i == 0 else self.abs(
                f'iter_{i-1:02d}/model_new.bp')
            ws.add(partial(ws.ln, path_model, 'model_init.bp'), 'link_models')

            # compute kernels
            ws.add(
                create_kernel('kernel',
                              {'path_model': ws.abs('model_init.bp')}))
            ws.add(partial(ws.ln, ws.abs('kernel/kernels.bp'), 'kernels.bp'),
                   'link_kernels')

            # compute direction
            ws.add(partial(self.compute_direction, ws, i))

            # line search
            ws.add(create_search('search'))
Example #5
def _compute_weightings(ws: Workspace, target: str, percentage: float):
    from obspy.geodetics import locations2degrees

    locations = ws.load(f'locations/{target}.pickle')
    npts = len(locations)
    dists = np.zeros([npts, npts])

    # compute pairwise great-circle distances within the current rank
    for i, l1 in enumerate(locations.values()):
        for j, l2 in enumerate(locations.values()):
            if i != j:
                dists[i, j] = dists[j, i] = locations2degrees(l1[0], l1[1], l2[0], l2[1])
    
    # search for optimal condition number
    ref_dists = np.linspace(1, 100, 100)
    conds = []
    arrs = []

    for ref_dist in ref_dists:
        dists_exp = np.exp(-(dists / ref_dist) ** 2)
        arr = 1 / np.sum(dists_exp, axis=1)
        arr /= np.sum(arr) / len(arr)
        
        conds.append(cond := arr.max() / arr.min())
        arrs.append(arr)

        if cond <= 0.8 * max(conds):
            break
    
    for i in range(len(conds)):
        if conds[i] >= percentage * max(conds):
            print(target, conds[i], min(arrs[i]), max(arrs[i]))

            weightings = {}

            for j, station in enumerate(locations.keys()):
                weightings[station] = arrs[i][j]
            
            ws.dump(weightings, f'weightings/{target}.pickle')
            return
    
    raise RuntimeError(f'failed to obtain condition number for {target} {conds}')
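
The core of the loop above is a standard Gaussian geographical weighting: each point's weight is the inverse of the sum of exp(-(d_ij / d_ref)^2) over all points, normalized so the mean weight is 1, with d_ref chosen by scanning the resulting condition numbers. A minimal standalone sketch of that formula (the distances and reference distance below are made up):

import numpy as np

def gaussian_weights(dists, ref_dist):
    # dists: symmetric matrix of inter-point distances in degrees
    w = 1.0 / np.sum(np.exp(-(dists / ref_dist) ** 2), axis=1)
    return w / w.mean()  # normalize so the average weight is 1

dists = np.array([[0.0, 10.0, 80.0],
                  [10.0, 0.0, 75.0],
                  [80.0, 75.0, 0.0]])
print(gaussian_weights(dists, 30.0))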
Example #6
def compute_weightings(event_weighting: Optional[float], station_weighting: Optional[float], dst: str):
    """Compute geographical weightings."""
    ws = Workspace('compute_weightings')

    # save location
    ws.add(partial(_save_locations, ws))

    # compute event weightings
    if event_weighting:
        func = partial(_compute_weightings, ws, 'event', event_weighting)
        ws.add(partial(ws.mpiexec, func, walltime='compute_weightings'), 'event_weightings')

    # compute station weightings
    if station_weighting:
        ws.add(subws := Workspace('station_weightings', concurrent=True))

        for event in get_events():
            func = partial(_compute_weightings, ws, f'station.{event}', station_weighting)
            subws.add(partial(ws.mpiexec, func, walltime='compute_weightings'), event)

    # move results to catalog directory
    ws.add(partial(ws.mv, 'weightings', dst), 'export_result')

    return ws
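
compute_weightings only builds and returns a child Workspace; a hypothetical caller (the weighting values and destination below are placeholders, and catalogdir is the catalog directory object used in Example #7) would attach it to a parent workspace the same way the other examples do:

main = Workspace('preprocess')
main.add(compute_weightings(0.33, 0.5, catalogdir.abs('weightings')))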
Example #7
    def setup(self):
        self.clear()

        # add steps to compute and process adjoint sources
        self.add(self._prepare_frequencies)

        # create super source
        self.add(self._encode_events)

        # compute weighting
        if self.event_weighting or self.station_weighting:
            if not catalogdir.has(self.ampstr):
                self.add(compute_weightings(self.event_weighting, self.station_weighting, catalogdir.abs(self.ampstr)))
            
            self.add(self._load_weightings)

        if self.path_encoded:
            self.add(partial(self.ln, self.path_encoded, 'observed.ft.h5'), 'link_observed')
        
        elif self.test_encoding == 1:
            # generate observed traces
            self.add(solver := create_solver('solver_observed', {
                'path_event': self.abs('SUPERSOURCE'),
                'path_stations': self.abs('SUPERSTATION'),
                'path_model': getpath('model_true'),
                'monochromatic_source': True,
                'save_forward': False,
                'process_traces': {
                    'dst': self.abs('observed.ft.h5'),
                    'func': partial(self._ft, None),
                    'input_type': 'stream',
                    'output_tag': 'FT',
                    'accessor': True
                }
            }))
        
        else:
            # prepare observed data and save to catalog directory
            ws = Workspace('prepare_observed', concurrent=True)

            for event in get_events():
                # location of processed traces
                if catalogdir.has(fname := f'{self.freqstr}/{event}.ft.h5'):
                    continue
Example #8
def _save_locations(ws: Workspace):
    """Save the locations of events and stations for weight computation."""
    event_loc = locate_events()
    station_loc = locate_stations()
    
    ws.dump(event_loc, 'locations/event.pickle')

    for event in get_events():
        loc = {}

        for station in get_stations(event):
            loc[station] = station_loc[station]
        
        ws.dump(loc, f'locations/station.{event}.pickle')

    ws.mkdir('weightings')
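
Example #5 indexes each stored location as l[0] and l[1] before passing them to locations2degrees, so the pickled dictionaries presumably map names to (latitude, longitude) pairs. The keys and coordinates below are placeholders, shown only to make the assumed data contract explicit:

# assumed layout of the pickled location files (placeholder names and coordinates)
event_loc = {'C201105051110A': (-37.5, -73.0)}   # event name -> (lat, lon)
station_loc = {'II.AAK': (42.6, 74.5)}           # network.station -> (lat, lon)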
Example #9
def download_catalog():
    ws = Workspace()
    ws.mkdir('mseed')
    ws.mkdir('xml')
    ws.mkdir('stations')
    ws.mkdir('raw_obs')
    ws.mkdir('tmp')

    ws.add(ws1 := Workspace('download'))
    ws.add(ws2 := Workspace('convert'))

    for event in d.ls('events'):
        ws1.add(partial(download_event, event), event)
        ws2.add(partial(convert, event), event)
    
    return ws