def stage_data(self):
        """Set up the remote optimization directory tree and stage its inputs.

        Creates the standard optimization subdirectories, writes the list of
        finished event kernel databases to kernels_list.txt, syncs the SPECFEM
        binaries and a DATA directory, and uploads the summation sbatch script.
        """
        # Create the optimization directory skeleton on the remote machine.
        for sub_dir in ('bin', 'PROCESSED_KERNELS', 'GRADIENT_INFO',
                        'LOGS', 'DATA', 'VTK_FILES'):
            self.remote_machine.makedir(
                os.path.join(self.config.optimization_dir, sub_dir))

        # List the DATABASES_MPI dir of every complete event, one per line.
        solver_root = self.config.solver_dir
        kernel_paths = []
        for event in self.complete_events:
            kernel_paths.append(
                os.path.join(solver_root, event, 'DATABASES_MPI'))
        self.remote_machine.write_file(
            os.path.join(self.config.optimization_dir, 'kernels_list.txt'),
            '\n'.join(kernel_paths))

        # Copy binaries.
        self.remote_machine.execute_command('rsync {} {}'.format(
            os.path.join(self.config.specfem_src_dir, 'bin', '*'),
            os.path.join(self.config.optimization_dir, 'bin')))

        # Get DATA directory to fool SPECFEM's optimization routines.
        self.remote_machine.execute_command('rsync -a {} {}'.format(
            os.path.join(solver_root, self.complete_events[0], 'DATA', '*'),
            os.path.join(self.config.optimization_dir, 'DATA')))

        # Render and upload the sbatch submission script.
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            rendered_sbatch = fh.read().format(**self.sbatch_dict)
        sbatch_target = os.path.join(self.config.optimization_dir,
                                     'run_add_kernels.sbatch')
        with self.remote_machine.ftp_connection.file(sbatch_target, 'wt') as fh:
            fh.write(rendered_sbatch)
Esempio n. 2
0
    def stage_data(self):
        """Upload the synthetics-processing script, info pickle, and sbatch file."""
        # Prepend a shebang so the script is directly executable remotely.
        with io.open(utilities.get_script_file("process_synthetics"), "r") as fh:
            script_lines = fh.readlines()
        shebang = "#!{}\n".format(self.config.python_exec)
        remote_script = os.path.join(self.config.solver_dir, "process_synthetics.py")
        self.remote_machine.write_file(remote_script, "".join([shebang] + script_lines))

        # Pickle the filter band and event list, push it, then clean up locally.
        info = {"lowpass": self.iteration_info["lowpass"],
                "highpass": self.iteration_info["highpass"],
                "event_list": self.event_info.keys()}
        tmp_pickle = "tmp_pickle.p"
        with io.open(tmp_pickle, "wb") as fh:
            cPickle.dump(info, fh)
        self.remote_machine.put_file(
            tmp_pickle, os.path.join(self.config.solver_dir, "info.p"))
        os.remove(tmp_pickle)

        # Render the sbatch template and upload it next to the script.
        with io.open(utilities.get_template_file("sbatch"), "r") as fh:
            rendered = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(
            os.path.join(self.config.solver_dir, "process_synthetics.sbatch"),
            rendered)
Esempio n. 3
0
    def stage_data(self):
        """Stage raw waveform data, station metadata, and the preprocessing
        script/sbatch files into the remote preprocessing directory."""

        self.remote_machine.makedir(self.config.preprocessing_dir)
        click.secho('Copying stations...')
        # -a recurses over the whole StationXML tree, preserving attributes.
        self.remote_machine.execute_command('rsync -a {} {}'.format(
            os.path.join(self.config.lasif_project_path, 'STATIONS', 'StationXML'),
            self.config.preprocessing_dir))
        # One raw data.mseed per event, each into its own remote directory.
        with click.progressbar(self.all_events, label="Copying data...") as events:
            for event in events:
                raw_dir = os.path.join(self.config.lasif_project_path, 'DATA', event, 'raw', 'data.mseed')
                event_dir = os.path.join(self.config.preprocessing_dir, event)
                self.remote_machine.makedir(event_dir)
                self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))

        # Pickled LASIF metadata consumed by the preprocessing script.
        self.remote_machine.put_file('lasif_data.p',
                                     os.path.join(self.config.preprocessing_dir, 'lasif_data.p'))

        remote_script = os.path.join(self.config.preprocessing_dir, "preprocess_data.py")
        with io.open(utilities.get_script_file('preprocess_data'), 'r') as fh:
            script_string = fh.readlines()
        # Prepend a shebang so the uploaded script can be executed directly.
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        remote_sbatch = os.path.join(self.config.preprocessing_dir, 'preprocess_data.sbatch')
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Esempio n. 4
0
    def stage_data(self):
        """Stage adjoint-source inputs and the writer script/sbatch remotely."""
        solver_dir = os.path.join(self.config.solver_dir)

        # Pull adjoint_sources.p for the base iteration into the solver dir.
        adjoint_pickle = os.path.join(
            self.config.lasif_project_path,
            'ADJOINT_SOURCES_AND_WINDOWS/ADJOINT_SOURCES',
            self.config.base_iteration, 'adjoint_sources.p')
        self.remote_machine.execute_command(
            'rsync {} {}'.format(adjoint_pickle, solver_dir))

        # Pull the LASIF metadata pickle as well.
        lasif_pickle = os.path.join(self.config.lasif_project_path,
                                    'lasif_data.p')
        self.remote_machine.execute_command(
            'rsync {} {}'.format(lasif_pickle, solver_dir))

        # Upload the writer script with an executable shebang prepended.
        with io.open(utilities.get_script_file('write_adjoint_sources'),
                     'r') as fh:
            script_lines = fh.readlines()
        script_lines.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(
            os.path.join(self.config.solver_dir, "write_adjoint_sources.py"),
            ''.join(script_lines))

        # Render and upload the sbatch file.
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            rendered = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(
            os.path.join(self.config.solver_dir,
                         'write_adjoint_sources.sbatch'),
            rendered)
Esempio n. 5
0
    def stage_data(self):
        """Stage the synthetics-processing script, info pickle, and sbatch
        file into the remote solver directory."""

        # Write script
        remote_script = os.path.join(self.config.solver_dir,
                                     "process_synthetics.py")
        with io.open(utilities.get_script_file("process_synthetics"),
                     "r") as fh:
            script_string = fh.readlines()
        # Prepend a shebang so the uploaded script can be executed directly.
        script_string.insert(0, "#!{}\n".format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, "".join(script_string))

        # Copy over pickle file with the filter band and event list.
        info = {
            "lowpass": self.iteration_info["lowpass"],
            "highpass": self.iteration_info["highpass"],
            "event_list": self.event_info.keys()
        }
        tmp_pickle = "tmp_pickle.p"
        remote_pickle = os.path.join(self.config.solver_dir, "info.p")
        with io.open(tmp_pickle, "wb") as fh:
            cPickle.dump(info, fh)
        self.remote_machine.put_file(tmp_pickle, remote_pickle)
        # Remove the local temporary pickle once it has been uploaded.
        os.remove(tmp_pickle)

        # Copy sbatch file.
        remote_sbatch = os.path.join(self.config.solver_dir,
                                     "process_synthetics.sbatch")
        with io.open(utilities.get_template_file("sbatch"), "r") as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Esempio n. 6
0
 def stage_data(self):
     """Write the sbatch script that runs xadd_model_tiso to add the
     smoothed gradient, scaled by the perturbation percentage."""
     self.sbatch_dict['execute'] = 'srun ./bin/xadd_model_tiso {:f}'.format(
         self.perturbation_percent)
     remote_sbatch = os.path.join(self.config.optimization_dir,
                                  'add_smoothed_gradient.sbatch')
     # Render the generic sbatch template with this job's parameters.
     with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
         sbatch_string = fh.read().format(**self.sbatch_dict)
     self.remote_machine.write_file(remote_sbatch, sbatch_string)
    def stage_data(self):
        """Stage everything needed to create adjoint sources.

        Best-effort copies of preprocessed data, synthetics, and window picks
        for every event into the remote adjoint directory (a failed sync is
        reported and skipped), followed by the LASIF metadata pickle and the
        creation script/sbatch file.
        """
        self.remote_machine.makedir(self.config.adjoint_dir)
        # LASIF names preprocessed dirs by period (s), i.e. 1 / frequency.
        hpass = 1 / self.iteration_info['highpass']
        lpass = 1 / self.iteration_info['lowpass']

        # Record the input data type in the local metadata pickle.
        with open('./lasif_data.p', 'rb') as fh:
            f = cPickle.load(fh)
        if self.config.input_data_type == 'noise':
            f.append({'input_data_type': 'noise'})
        elif self.config.input_data_type == 'earthquake':
            f.append({'input_data_type': 'earthquake'})
        with open('./lasif_data.p', 'wb') as fh:
            cPickle.dump(f, fh)

        with click.progressbar(self.all_events, label="Copying preprocessed data...") as events:
            for event in events:
                # Narrowed from a bare except: still best-effort, but no
                # longer masks KeyboardInterrupt/SystemExit.
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path, 'DATA', event, 'preprocessed_{:.1f}_{:.1f}'.format(lpass, hpass), 'preprocessed_data.mseed')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.makedir(event_dir)
                    self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync preprocessed_data.mseed for: " + event)

        with click.progressbar(self.all_events, label="Copying synthetics...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path, 'SYNTHETICS', event, self.config.base_iteration, 'synthetics.mseed')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync synthetics.mseed for: " + event)

        with click.progressbar(self.all_events, label="Copying windows...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path, 'ADJOINT_SOURCES_AND_WINDOWS/WINDOWS', self.config.first_iteration, event, 'windows.p')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync window.p for: " + event)

        self.remote_machine.put_file('lasif_data.p',
                                     os.path.join(self.config.adjoint_dir, 'lasif_data.p'))

        # Upload the creation script with an executable shebang.
        remote_script = os.path.join(self.config.adjoint_dir, "create_adjoint_sources.py")
        with io.open(utilities.get_script_file('create_adjoint_sources'), 'r') as fh:
            script_string = fh.readlines()
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        remote_sbatch = os.path.join(self.config.adjoint_dir, 'create_adjoint_sources.sbatch')
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
    def stage_data(self):
        """Stage data, synthetics, stations, and the window-selection
        script/sbatch into the remote window directory."""
        # Record the input data type in the local metadata pickle.
        with open('./lasif_data.p', 'rb') as fh:
            f = cPickle.load(fh)
        if self.config.input_data_type == 'noise':
            f.append({'input_data_type': 'noise'})
        elif self.config.input_data_type == 'earthquake':
            f.append({'input_data_type': 'earthquake'})
        with open('./lasif_data.p', 'wb') as fh:
            cPickle.dump(f, fh)

        # LASIF names preprocessed dirs by period (s), i.e. 1 / frequency.
        hpass = 1 / self.iteration_info['highpass']
        lpass = 1 / self.iteration_info['lowpass']
        pre_dir_string = 'preprocessed_{:.1f}_{:.1f}'.format(lpass, hpass)

        # Copy remote data: synthetics and preprocessed data per event.
        all_events = sorted(self.event_info.keys())
        self.remote_machine.makedir(self.config.window_dir)
        syn_base = os.path.join(self.config.lasif_project_path, "SYNTHETICS")
        dat_base = os.path.join(self.config.lasif_project_path, 'DATA')
        with click.progressbar(all_events, label="Copying data...") as events:
            for event in events:
                event_dir = os.path.join(self.config.window_dir, event)
                self.remote_machine.makedir(event_dir)

                syn_dat = os.path.join(syn_base, event,
                                       self.config.base_iteration, "synthetics.mseed")
                self.remote_machine.execute_command(
                    "rsync {} {}".format(syn_dat, event_dir))

                pro_dat = os.path.join(dat_base, event, pre_dir_string, 'preprocessed_data.mseed')
                self.remote_machine.execute_command(
                    'rsync {} {}'.format(pro_dat, event_dir))

        # Put Stations
        click.secho('Copying stations...')
        self.remote_machine.execute_command('rsync -a {} {}'.format(
            os.path.join(self.config.lasif_project_path, 'STATIONS', 'StationXML'),
            self.config.window_dir))

        # Put data. (Renamed from `file`, which shadowed the builtin.)
        pickle_name = "lasif_data.p"
        self.remote_machine.put_file(
            pickle_name, os.path.join(self.config.window_dir, pickle_name))

        # Put python script with an executable shebang.
        remote_script = os.path.join(self.config.window_dir, 'select_windows.py')
        with io.open(utilities.get_script_file('select_windows'), 'r') as fh:
            script_string = fh.readlines()
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        # Sbatch: the parallel template needs the python bin directory.
        self.sbatch_dict["python_exec"] = os.path.dirname(self.config.python_exec)
        remote_sbatch = os.path.join(self.config.window_dir, 'select_windows.sbatch')
        with io.open(utilities.get_template_file('sbatch_python_parallel'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Esempio n. 9
0
    def stage_data(self):
        """Stage data, synthetics, stations, and the window-selection
        script/sbatch into the remote window directory."""
        # LASIF names preprocessed dirs by period (s), i.e. 1 / frequency.
        hpass = 1 / self.iteration_info['highpass']
        lpass = 1 / self.iteration_info['lowpass']
        pre_dir_string = 'preprocessed_{:.1f}_{:.1f}'.format(lpass, hpass)

        # Copy remote data: synthetics and preprocessed data per event.
        all_events = sorted(self.event_info.keys())
        self.remote_machine.makedir(self.config.window_dir)
        syn_base = os.path.join(self.config.lasif_project_path, "SYNTHETICS")
        dat_base = os.path.join(self.config.lasif_project_path, 'DATA')
        with click.progressbar(all_events, label="Copying data...") as events:
            for event in events:
                event_dir = os.path.join(self.config.window_dir, event)
                self.remote_machine.makedir(event_dir)

                syn_dat = os.path.join(syn_base, event,
                                       self.config.base_iteration,
                                       "synthetics.mseed")
                self.remote_machine.execute_command("rsync {} {}".format(
                    syn_dat, event_dir))

                pro_dat = os.path.join(dat_base, event, pre_dir_string,
                                       'preprocessed_data.mseed')
                self.remote_machine.execute_command('rsync {} {}'.format(
                    pro_dat, event_dir))

        # Put Stations
        click.secho('Copying stations...')
        self.remote_machine.execute_command('rsync -a {} {}'.format(
            os.path.join(self.config.lasif_project_path, 'STATIONS',
                         'StationXML'), self.config.window_dir))

        # Put data. (Renamed from `file`, which shadowed the builtin.)
        pickle_name = "lasif_data.p"
        self.remote_machine.put_file(
            pickle_name, os.path.join(self.config.window_dir, pickle_name))

        # Put python script with an executable shebang.
        remote_script = os.path.join(self.config.window_dir,
                                     'select_windows.py')
        with io.open(utilities.get_script_file('select_windows'), 'r') as fh:
            script_string = fh.readlines()
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        # Sbatch: the parallel template needs the python bin directory.
        self.sbatch_dict["python_exec"] = os.path.dirname(
            self.config.python_exec)
        remote_sbatch = os.path.join(self.config.window_dir,
                                     'select_windows.sbatch')
        with io.open(utilities.get_template_file('sbatch_python_parallel'),
                     'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Esempio n. 10
0
    def generate_event_xml(self, event):
        """Render the event XML template for *event*, using the coordinates
        of the first channel found in its StationXML metadata."""
        # Event names presumably look like "NET.STA..." — the first two
        # dot-separated fields select the metadata file. TODO confirm.
        name_parts = event.split('.')
        station_xml_name = os.path.join(
            "./STATION_XML_META",
            "station.{}_{}.meta.xml".format(name_parts[0], name_parts[1]))
        inv = obspy.read_inventory(station_xml_name, format="stationxml")
        first_channel = inv.get_contents()['channels'][0]
        sta_loc = inv.get_coordinates(first_channel)

        with io.open(utilities.get_template_file('event'), 'rt') as fh:
            event_template = fh.read()

        return event_template.format(
            event_name=event, start_time=self.startTime,
            latitude=sta_loc['latitude'], longitude=sta_loc['longitude'],
            depth=sta_loc['local_depth'])
Esempio n. 11
0
    def stage_data(self):
        """Upload the mesher sbatch script and a rendered Par_file.

        The Par_file template depends on the simulation type (regional vs.
        global) and the parameter set depends on the model type: CEM_GLL uses
        the step-length parameters, CEM the forward-save parameters.
        """
        # Render and upload the mesher submission script.
        with io.open(utilities.get_template_file("sbatch"), "r") as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        sbatch_path = os.path.join(self.config.solver_dir, "MESH", "run_mesher.sbatch")
        with self.remote_machine.ftp_connection.file(sbatch_path, "wt") as fh:
            fh.write(sbatch_string)

        par_file_path = os.path.join(
            self.config.solver_dir, "MESH", "DATA", "Par_file")

        def _write_par_file(template_name, params):
            # Render one Par_file template with `params` and upload it.
            with io.open(utilities.get_template_file(template_name), "rt") as fh:
                self.remote_machine.write_file(
                    par_file_path, fh.read().format(**params))

        if self.model_type == 'CEM_GLL':
            print('using model type: step_length')
            params = utilities.set_params_step_length(self.config.specfem_dict)
            if self.config.simulation_type == 'regional':
                _write_par_file("Par_file_regional", params)
            else:
                _write_par_file("Par_file", params)
        elif self.model_type == 'CEM':
            params = utilities.set_params_forward_save(self.config.specfem_dict,
                                                       self.config.model)
            # NOTE(review): unlike CEM_GLL, simulation types other than
            # regional/global write no Par_file here — preserved as-is.
            if self.config.simulation_type == 'regional':
                _write_par_file("Par_file_regional", params)
            elif self.config.simulation_type == 'global':
                _write_par_file("Par_file", params)
Esempio n. 12
0
    def stage_data(self):
        """Prepare remote inputs for xcombine_vol_data_vtk.

        Chooses the files to convert based on ``self.vtk_type``, writes
        slices.txt remotely, and uploads one sbatch script whose job converts
        every selected file to VTK.

        :raises ValueError: if ``self.vtk_type`` is not 'kernel' or 'model'.
        """
        if self.vtk_type == 'kernel':
            self.files = [
                'bulk_betah_kernel_smooth', 'bulk_betav_kernel_smooth',
                'bulk_c_kernel_smooth', 'eta_kernel_smooth',
                'hess_inv_kernel_smooth'
            ]
            file_src_dir = os.path.join(self.config.optimization_dir,
                                        'PROCESSED_KERNELS')
        elif self.vtk_type == 'model':
            self.files = ['vsh', 'vsv']
            file_src_dir = os.path.join(self.config.solver_dir, 'MESH',
                                        'DATABASES_MPI')
        else:
            # Previously fell through to a NameError on file_src_dir below.
            raise ValueError("Unknown vtk_type: {}".format(self.vtk_type))

        kernel_output_dir = os.path.join(self.config.optimization_dir,
                                         'VTK_FILES')
        topo_dir = os.path.join(self.config.solver_dir, 'MESH',
                                'DATABASES_MPI')
        self.remote_machine.makedir(kernel_output_dir)

        # Write slices.txt with one MPI slice index per line.
        slices = "".join(str(i) + "\n" for i in range(self.nslices))
        slices_path = os.path.join(self.config.optimization_dir, 'VTK_FILES',
                                   'slices.txt')
        with self.remote_machine.ftp_connection.file(slices_path, 'wt') as fh:
            fh.write(slices)

        # One xcombine_vol_data_vtk invocation per file, all in one job.
        # (Loop var renamed from `file`, which shadowed the builtin; the
        # sbatch_dict assignment is hoisted out of the loop.)
        execute_string = ''
        for fname in self.files:
            execute_string += (
                'srun ./bin/xcombine_vol_data_vtk ./VTK_FILES/slices.txt {} {} {} {} 0 1\n'
                .format(fname, topo_dir, file_src_dir, kernel_output_dir))
        self.sbatch_dict['execute'] = execute_string

        # Render and upload the sbatch script.
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        sbatch_path = os.path.join(self.config.optimization_dir,
                                   'make_vtk.sbatch')
        with self.remote_machine.ftp_connection.file(sbatch_path, 'wt') as fh:
            fh.write(sbatch_string)
Esempio n. 13
0
    def generate_event_xml(self, event):
        """Return a rendered event XML string for *event*.

        Reads the event's StationXML metadata, takes the coordinates of the
        first listed channel, and fills the 'event' template with them.
        """

        # Presumably event names look like "NET.STA..."; the first two
        # dot-separated fields select the metadata file. TODO confirm.
        station_xml_name = os.path.join(
            "./STATION_XML_META", "station.{}_{}.meta.xml".format(
                event.split('.')[0],
                event.split('.')[1]))
        inv = obspy.read_inventory(station_xml_name, format="stationxml")
        channel = inv.get_contents()['channels']
        sta_loc = inv.get_coordinates(channel[0])

        with io.open(utilities.get_template_file('event'), 'rt') as fh:
            event_template = fh.read()

        return event_template.format(event_name=event,
                                     start_time=self.startTime,
                                     latitude=sta_loc['latitude'],
                                     longitude=sta_loc['longitude'],
                                     depth=sta_loc['local_depth'])
Esempio n. 14
0
    def stage_data(self):
        """Write one smoothing sbatch script per kernel.

        Each script runs xsmooth_sem with the configured horizontal and
        vertical smoothing lengths on a single kernel.
        """
        kernel_dir = os.path.join(self.config.optimization_dir, 'PROCESSED_KERNELS')
        topo_dir = os.path.join(self.config.solver_dir, 'MESH', 'DATABASES_MPI')
        self.kernels = ['bulk_betah_kernel', 'bulk_betav_kernel', 'bulk_c_kernel', 'eta_kernel', 'hess_inv_kernel']

        # Need to write a specific sbatch script for each kernel.
        for kernel in self.kernels:
            self.sbatch_dict['execute'] = 'srun ./bin/xsmooth_sem {:d} {:d} {} {} {}'.format(
                                                self.sigma_h, self.sigma_v, kernel, kernel_dir, topo_dir)
            self.sbatch_dict['job_name'] = 'smooth_{}'.format(kernel)
            self.sbatch_dict['error'] = 'smooth_{}.stderr'.format(kernel)
            self.sbatch_dict['output'] = 'smooth_{}.stdout'.format(kernel)

            # Render the template with this kernel's settings and upload it.
            with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
                rendered = fh.read().format(**self.sbatch_dict)
            sbatch_target = os.path.join(
                self.config.optimization_dir,
                'run_smooth_{}.sbatch'.format(kernel))
            with self.remote_machine.ftp_connection.file(sbatch_target, 'wt') as fh:
                fh.write(rendered)
Esempio n. 15
0
    def stage_data(self):
        """Prepare inputs and per-file sbatch scripts for VTK conversion.

        Selects the files to convert by ``self.vtk_type``, writes slices.txt
        remotely, and writes one xcombine_vol_data_vtk sbatch script per file.

        :raises ValueError: if ``self.vtk_type`` is not one of
            'smoothed_kernel', 'raw_kernel', or 'model'.
        """
        if self.vtk_type == 'smoothed_kernel':
            self.files = ['bulk_betah_kernel_smooth', 'bulk_betav_kernel_smooth',
                          'bulk_c_kernel_smooth', 'eta_kernel_smooth', 'hess_inv_kernel_smooth']
            file_src_dir = os.path.join(self.config.optimization_dir, 'PROCESSED_KERNELS')

        elif self.vtk_type == 'raw_kernel':
            self.files = ['bulk_betah_kernel', 'bulk_betav_kernel', 'bulk_c_kernel',
                          'eta_kernel', 'hess_inv_kernel']
            file_src_dir = os.path.join(self.config.optimization_dir, 'PROCESSED_KERNELS')

        elif self.vtk_type == 'model':
            self.files = ['vsh', 'vsv']
            file_src_dir = os.path.join(self.config.solver_dir, 'MESH', 'DATABASES_MPI')

        else:
            # Previously fell through to a NameError on file_src_dir below.
            raise ValueError("Unknown vtk_type: {}".format(self.vtk_type))

        kernel_output_dir = os.path.join(self.config.optimization_dir, 'VTK_FILES')
        topo_dir = os.path.join(self.config.solver_dir, 'MESH', 'DATABASES_MPI')
        self.remote_machine.makedir(kernel_output_dir)

        # Write slices.txt with one MPI slice index per line.
        slices = "".join(str(i) + "\n" for i in range(self.nslices))
        slices_path = os.path.join(self.config.optimization_dir, 'VTK_FILES', 'slices.txt')
        with self.remote_machine.ftp_connection.file(slices_path, 'wt') as fh:
            fh.write(slices)

        # Need to write a specific sbatch script for each file.
        for element in self.files:
            self.sbatch_dict['execute'] = 'srun ./bin/xcombine_vol_data_vtk ./VTK_FILES/slices.txt {} {} {} {} 0 1\n'\
                                            .format(element, topo_dir, file_src_dir, kernel_output_dir)
            self.sbatch_dict['job_name'] = 'make_vtk_{}'.format(element)
            self.sbatch_dict['error'] = 'make_vtk_{}.stderr'.format(element)
            self.sbatch_dict['output'] = 'make_vtk_{}.stdout'.format(element)

            # Render this file's sbatch from the template and upload it.
            with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
                sbatch_string = fh.read().format(**self.sbatch_dict)
            sbatch_path = os.path.join(self.config.optimization_dir, 'run_make_vtk_{}.sbatch'.format(element))
            with self.remote_machine.ftp_connection.file(sbatch_path, 'wt') as fh:
                fh.write(sbatch_string)
Esempio n. 16
0
    def stage_data(self):
        """Stage adjoint-source inputs and the writer script/sbatch into the
        remote solver directory."""

        # Get adjoint_sources.p
        raw_dir = os.path.join(self.config.lasif_project_path,  'ADJOINT_SOURCES_AND_WINDOWS/ADJOINT_SOURCES',
                               self.config.base_iteration, 'adjoint_sources.p')
        event_dir = os.path.join(self.config.solver_dir)
        self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))

        # Get lasif_data.p
        raw_dir = os.path.join(self.config.lasif_project_path,  'lasif_data.p')
        event_dir = os.path.join(self.config.solver_dir)
        self.remote_machine.execute_command('rsync {} {}'.format(raw_dir, event_dir))

        # Upload the writer script; the shebang makes it directly executable.
        remote_script = os.path.join(self.config.solver_dir, "write_adjoint_sources.py")
        with io.open(utilities.get_script_file('write_adjoint_sources'), 'r') as fh:
            script_string = fh.readlines()
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        # Render the sbatch template and upload it next to the script.
        remote_sbatch = os.path.join(self.config.solver_dir, 'write_adjoint_sources.sbatch')
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Esempio n. 17
0
    def stage_data(self):
        """Prepare remote inputs for xcombine_vol_data_vtk and upload the
        sbatch script that converts each selected file to VTK.

        :raises ValueError: if ``self.vtk_type`` is not 'kernel' or 'model'.
        """
        if self.vtk_type == 'kernel':
            self.files = ['bulk_betah_kernel_smooth', 'bulk_betav_kernel_smooth',
                          'bulk_c_kernel_smooth', 'eta_kernel_smooth', 'hess_inv_kernel_smooth']
            file_src_dir = os.path.join(self.config.optimization_dir, 'PROCESSED_KERNELS')
        elif self.vtk_type == 'model':
            self.files = ['vsh', 'vsv']
            file_src_dir = os.path.join(self.config.solver_dir, 'MESH', 'DATABASES_MPI')
        else:
            # Previously fell through to a NameError on file_src_dir below.
            raise ValueError("Unknown vtk_type: {}".format(self.vtk_type))

        kernel_output_dir = os.path.join(self.config.optimization_dir, 'VTK_FILES')
        topo_dir = os.path.join(self.config.solver_dir, 'MESH', 'DATABASES_MPI')
        self.remote_machine.makedir(kernel_output_dir)

        # Write slices.txt with one MPI slice index per line.
        slices = "".join(str(i) + "\n" for i in range(self.nslices))
        slices_path = os.path.join(self.config.optimization_dir, 'VTK_FILES', 'slices.txt')
        with self.remote_machine.ftp_connection.file(slices_path, 'wt') as fh:
            fh.write(slices)

        # One xcombine_vol_data_vtk invocation per file, all in one job.
        # (Loop var renamed from `file`, which shadowed the builtin; the
        # sbatch_dict assignment is hoisted out of the loop.)
        execute_string = ''
        for fname in self.files:
            execute_string += ('srun ./bin/xcombine_vol_data_vtk ./VTK_FILES/slices.txt {} {} {} {} 0 1\n'
                               .format(fname, topo_dir, file_src_dir, kernel_output_dir))
        self.sbatch_dict['execute'] = execute_string

        # Render and upload the sbatch script.
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        sbatch_path = os.path.join(self.config.optimization_dir, 'make_vtk.sbatch')
        with self.remote_machine.ftp_connection.file(sbatch_path, 'wt') as fh:
            fh.write(sbatch_string)
Esempio n. 18
0
    def stage_data(self):
        """Stage all inputs for adjoint-source creation.

        Best-effort copies of preprocessed data, synthetics, and windows for
        every event into the remote adjoint directory (a failed sync is
        reported and skipped), followed by the LASIF metadata pickle and the
        creation script/sbatch file.
        """
        self.remote_machine.makedir(self.config.adjoint_dir)
        # LASIF names preprocessed dirs by period (s), i.e. 1 / frequency.
        hpass = 1 / self.iteration_info['highpass']
        lpass = 1 / self.iteration_info['lowpass']

        # Record the input data type in the local metadata pickle.
        with open('./lasif_data.p', 'rb') as fh:
            f = cPickle.load(fh)
        if self.config.input_data_type == 'noise':
            f.append({'input_data_type': 'noise'})
        elif self.config.input_data_type == 'earthquake':
            f.append({'input_data_type': 'earthquake'})
        with open('./lasif_data.p', 'wb') as fh:
            # Fixed: a stray trailing backslash here continued the statement
            # into the blank line below, which is a syntax error.
            cPickle.dump(f, fh)

        with click.progressbar(self.all_events,
                               label="Copying preprocessed data...") as events:
            for event in events:
                # Narrowed from a bare except: still best-effort, but no
                # longer masks KeyboardInterrupt/SystemExit.
                try:
                    raw_dir = os.path.join(
                        self.config.lasif_project_path, 'DATA', event,
                        'preprocessed_{:.1f}_{:.1f}'.format(lpass, hpass),
                        'preprocessed_data.mseed')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.makedir(event_dir)
                    self.remote_machine.execute_command('rsync {} {}'.format(
                        raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync preprocessed_data.mseed for: " + event)

        with click.progressbar(self.all_events,
                               label="Copying synthetics...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(self.config.lasif_project_path,
                                           'SYNTHETICS', event,
                                           self.config.base_iteration,
                                           'synthetics.mseed')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.execute_command('rsync {} {}'.format(
                        raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync synthetics.mseed for: " + event)

        with click.progressbar(self.all_events,
                               label="Copying windows...") as events:
            for event in events:
                try:
                    raw_dir = os.path.join(
                        self.config.lasif_project_path,
                        'ADJOINT_SOURCES_AND_WINDOWS/WINDOWS',
                        self.config.first_iteration, event, 'windows.p')
                    event_dir = os.path.join(self.config.adjoint_dir, event)
                    self.remote_machine.execute_command('rsync {} {}'.format(
                        raw_dir, event_dir))
                except Exception:
                    print("\nCould not sync window.p for: " + event)

        self.remote_machine.put_file(
            'lasif_data.p',
            os.path.join(self.config.adjoint_dir, 'lasif_data.p'))

        # Upload the creation script with an executable shebang.
        remote_script = os.path.join(self.config.adjoint_dir,
                                     "create_adjoint_sources.py")
        with io.open(utilities.get_script_file('create_adjoint_sources'),
                     'r') as fh:
            script_string = fh.readlines()
        script_string.insert(0, '#!{}\n'.format(self.config.python_exec))
        self.remote_machine.write_file(remote_script, ''.join(script_string))

        remote_sbatch = os.path.join(self.config.adjoint_dir,
                                     'create_adjoint_sources.sbatch')
        with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
            sbatch_string = fh.read().format(**self.sbatch_dict)
        self.remote_machine.write_file(remote_sbatch, sbatch_string)
Esempio n. 19
0
 def stage_data(self):
     """Write the sbatch script that adds the smoothed gradient via
     xadd_model_tiso, scaled by the perturbation percentage."""
     step = self.perturbation_percent
     self.sbatch_dict['execute'] = 'srun ./bin/xadd_model_tiso {:f}'.format(step)
     sbatch_target = os.path.join(self.config.optimization_dir, 'add_smoothed_gradient.sbatch')
     # Render the generic sbatch template with this job's parameters.
     with io.open(utilities.get_template_file('sbatch'), 'r') as fh:
         rendered = fh.read().format(**self.sbatch_dict)
     self.remote_machine.write_file(sbatch_target, rendered)