Example #1
def manual_modelling(tensor_info, data_type, default_dirs):
    """Routine for manual finite fault modelling.
    
    :param tensor_info: dictionary with moment tensor properties
    :param data_type: list with data types to be used in modelling
    :param default_dirs: dictionary with default directories to be used
    :type tensor_info: dict
    :type data_type: list
    :type default_dirs: dict
    """
    if not os.path.isdir('logs'):
        os.mkdir('logs')
    if not os.path.isdir('plots'):
        os.mkdir('plots')
    with open('segments_data.json') as f:
        segments_data = json.load(f)
    min_vel, max_vel = __ask_velrange()
    logger = ml.create_log('manual_ffm', os.path.join('logs',
                                                      'manual_ffm.log'))
    logger.info('Write input files')
    tensor.write_tensor(tensor_info)
    writing_inputs(tensor_info, data_type, segments_data, min_vel, max_vel)
    inversion(tensor_info, data_type, default_dirs, logger)
    logger.info('Plot data in folder {}'.format(os.getcwd()))
    execute_plot(tensor_info, data_type, segments_data, default_dirs)
    ml.close_log(logger)
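
A minimal driver sketch for manual_modelling, assuming the working directory already holds segments_data.json; every dictionary key and value below is a hypothetical placeholder, not something the routine mandates.

# Hypothetical inputs; keys and values are illustrative placeholders only.
tensor_info = {'lat': -31.6, 'lon': -71.6, 'depth': 23.0, 'moment_mag': 3.2e28}
default_dirs = {'tele_gf': '/opt/ffm/bin/green_tele'}
manual_modelling(tensor_info, ['tele_body', 'surf_tele'], default_dirs)
# Creates logs/ and plots/ if missing, writes input files, runs the inversion,
# and plots the results in the current working directory.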
Example #2
def automatic_usgs(tensor_info,
                   data_type,
                   default_dirs,
                   velmodel=None,
                   dt_cgps=1.0):
    """Routine for automatic FFM modelling
    
    :param tensor_info: dictionary with moment tensor properties
    :param data_type: list with data types to be used in modelling
    :param default_dirs: dictionary with default directories to be used
    :param velmodel: dictionary with velocity model
    :param dt_cgps: sampling interval for cGPS data
    :type tensor_info: dict
    :type data_type: list
    :type default_dirs: dict
    :type velmodel: dict, optional
    :type dt_cgps: float, optional
    """
    logger = ml.create_log('automatic_ffm',
                           os.path.join('logs', 'automatic_ffm.log'))
    logger = ml.add_console_handler(logger)
    logger.info('Starting FFM program')
    sol_folder = os.path.abspath(os.getcwd())
    time0 = time.time()
    data_prop = tp.properties_json(tensor_info, dt_cgps=dt_cgps)
    os.chdir(os.path.join(sol_folder, 'data'))
    time2 = time.time()
    logger.info('Process data')
    processing(tensor_info, data_type, data_prop)
    time2 = time.time() - time2
    logger.info('Time spent processing traces: {}'.format(time2))
    os.chdir(sol_folder)
    logger.info('Compute GF bank')
    if not velmodel:
        velmodel = mv.select_velmodel(tensor_info, default_dirs)
    input_files.write_velmodel(velmodel)
    gf_bank_str = os.path.join(sol_folder, 'GF_strong')
    gf_bank_cgps = os.path.join(sol_folder, 'GF_cgps')
    get_gf_bank = default_dirs['strong_motion_gf_bank2']
    if 'cgps' in data_type:
        logger.info('Compute cGPS GF bank')
        green_dict = gf.fk_green_fun1(data_prop,
                                      tensor_info,
                                      gf_bank_cgps,
                                      cgps=True)
        input_files.write_green_file(green_dict, cgps=True)
        with open(os.path.join('logs', 'GF_cgps_log'), "w") as out_gf_cgps:
            p1 = subprocess.Popen([get_gf_bank, 'cgps'], stdout=out_gf_cgps)
            p1.wait()
    if 'strong_motion' in data_type:
        logger.info('Compute strong motion GF bank')
        green_dict = gf.fk_green_fun1(data_prop, tensor_info, gf_bank_str)
        input_files.write_green_file(green_dict)
        with open(os.path.join('logs', 'GF_strong_log'), "w") as out_gf_strong:
            p2 = subprocess.Popen([get_gf_bank], stdout=out_gf_strong)
            p2.wait()

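    # Share the common input files with both nodal-plane folders.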
    files = [
        'Green.in', 'Green_cgps.in', 'modelling_stats.json',
        os.path.join('data', 'gps_data'), 'strong_motion_gf.json',
        'cgps_gf.json', 'sampling_filter.json'
    ]
    folders = ['NP1', 'NP2']
    for folder in folders:
        for file in files:
            if os.path.isfile(file):
                copy2(file, folder)
    info_np1, info_np2 = tensor.planes_from_tensor(tensor_info)
    keywords = {'velmodel': velmodel}
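    # Invert each nodal plane in parallel, each inside its own folder.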
    os.chdir(os.path.join(sol_folder, 'NP1'))
    p1 = Process(target=_automatic2,
                 args=(tensor_info, info_np1, data_type, data_prop,
                       default_dirs, logger),
                 kwargs=keywords)
    p1.start()
    os.chdir(os.path.join(sol_folder, 'NP2'))
    p2 = Process(target=_automatic2,
                 args=(tensor_info, info_np2, data_type, data_prop,
                       default_dirs, logger),
                 kwargs=keywords)
    p2.start()
    for p in [p1, p2]:
        p.join()
    logger.info('Time spent: {}'.format(time.time() - time0))
    ml.close_log(logger)
    return
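
A hedged call sketch for automatic_usgs, reusing the hypothetical tensor_info and default_dirs from Example #1; the solution-folder layout (data/, logs/, NP1/, NP2/ subfolders) is an assumption inferred from the chdir and copy2 calls above.

# Hypothetical driver; the folder layout below is assumed, not guaranteed.
os.chdir('/work/event_folder')  # must contain data/, logs/, NP1/ and NP2/
automatic_usgs(tensor_info, ['cgps', 'strong_motion'], default_dirs,
               velmodel=None, dt_cgps=1.0)
# velmodel=None makes the routine pick a model via mv.select_velmodel.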
Example #3
def checkerboard(tensor_info,
                 data_type,
                 default_dirs,
                 max_slip=200,
                 add_error=False,
                 option='Checkerboard',
                 option2='FFM modelling'):
    """Routine for running checkerboard tests.
    
    :param tensor_info: dictionary with moment tensor properties
    :param data_type: set with data types to be used in modelling
    :param default_dirs: dictionary with default directories to be used
    :param max_slip: maximum slip in case of checkerboard test
    :param add_error: whether we add noise to synthetic waveforms
    :param option: string with location of input file with kinematic model to
     use
    :param option2: whether we invert the checkerboard model or not
    :type tensor_info: dict
    :type data_type: set
    :type default_dirs: dict
    :type max_slip: float, optional
    :type add_error: bool, optional
    :type option: string, optional
    :type option2: string, optional
    """
    if max_slip > 0:
        folder_name = 'checkerboard_resolution'
    else:
        folder_name = 'checkerboard_noise'
    if option != 'Checkerboard':
        folder_name = option
    if not os.path.isdir(folder_name):
        os.mkdir(folder_name)
    if not os.path.isdir('logs'):
        os.mkdir('logs')
    if not os.path.isdir('plots'):
        os.mkdir('plots')
    for file in os.listdir():
        if os.path.isfile(file):
            copy2(file, folder_name)
    os.chdir(folder_name)
    forward_modelling(tensor_info,
                      data_type,
                      default_dirs,
                      option=option,
                      max_slip=max_slip)
    with open('sampling_filter.json') as f:
        data_prop = json.load(f)
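    # Swap the observed records for the synthetics generated above, per data type.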
    json_mapping = {
        'tele_body': 'tele_waves.json',
        'surf_tele': 'surf_waves.json',
        'strong_motion': 'strong_motion_waves.json',
        'cgps': 'cgps_waves.json',
        'gps': 'static_data.json',
        'dart': 'dart_waves.json'
    }
    for data_type0 in data_type:
        json_dict = json_mapping[data_type0]
        with open(json_dict) as f:
            files = json.load(f)
        input_files.from_synthetic_to_obs(files,
                                          data_type0,
                                          tensor_info,
                                          data_prop,
                                          add_error=add_error)
    logger = ml.create_log('checkerboard_ffm',
                           os.path.join('logs', 'checkerboard_ffm.log'))
    if option2 == 'FFM modelling':
        with open('segments_data.json') as f:
            segments_data = json.load(f)
        inversion(tensor_info, data_type, default_dirs, logger)
        execute_plot(tensor_info, data_type, segments_data, default_dirs,
                     plot_input=True)
    ml.close_log(logger)
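
A sketch of a checkerboard resolution test with the same hypothetical inputs as above; per the code, max_slip > 0 selects the checkerboard_resolution folder, while max_slip <= 0 would use checkerboard_noise.

# Hypothetical resolution test; noise is added to the synthetic records.
checkerboard(tensor_info, {'tele_body', 'strong_motion'}, default_dirs,
             max_slip=300, add_error=True)
# option2 defaults to 'FFM modelling', so the synthetic data are re-inverted.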
Example #4
def forward_modelling(tensor_info,
                      data_type,
                      default_dirs,
                      option='Solucion.txt',
                      max_slip=200):
    """Routine for forward modelling.
    
    :param tensor_info: dictionary with moment tensor properties
    :param data_type: set with data types to be used in modelling
    :param default_dirs: dictionary with default directories to be used
    :param option: string with location of input file with kinematic model to
     use
    :param max_slip: maximum slip in case of checkerboard test
    :type tensor_info: dict
    :type data_type: set
    :type default_dirs: dict
    :type option: string, optional
    :type max_slip: float, optional
    """
    tensor.write_tensor(tensor_info)
    if not os.path.isdir('logs'):
        os.mkdir('logs')
    if not os.path.isdir('plots'):
        os.mkdir('plots')
    len_stk = 8 if option == 'point_source' else 5
    len_dip = 1 if option == 'point_source' else 5
    with open('segments_data.json') as f:
        segments_data = json.load(f)
    #
    # Get input model
    #
    model = load_ffm_model(segments_data,
                           option=option,
                           max_slip=max_slip,
                           len_stk=len_stk,
                           len_dip=len_dip)
    if not os.path.isfile('velmodel_data.json'):
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT),
                                'velmodel_data.json')
    with open('velmodel_data.json') as f:
        velmodel = json.load(f)
    min_vel, max_vel = __ask_velrange()

    logger = ml.create_log('forward_model',
                           os.path.join('logs', 'forward_model.log'))
    logger.info('Write input files')
    segments, rise_time, point_sources = pl_mng.__read_planes_info()
    shear = pf.shear_modulous(point_sources, velmodel=velmodel)
    dx = segments_data[0]['delta_x']
    dy = segments_data[0]['delta_y']
    slip = model['slip']
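    # Seismic moment per subfault: area (dx * dy) times slip times rigidity.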
    moment_sub = [dx * dy * slip_seg * shear_seg
                  for slip_seg, shear_seg in zip(slip, shear)]
    moment = np.sum(
        [np.sum(moment_seg.flatten()) for moment_seg in moment_sub])
    moment = 10**10 * moment
    writing_inputs(tensor_info,
                   data_type,
                   segments_data,
                   min_vel,
                   max_vel,
                   moment_mag=moment,
                   forward_model=model)
    inversion(tensor_info, data_type, default_dirs, logger, forward=True)
    logger.info('Plot data in folder {}'.format(os.getcwd()))
    execute_plot(tensor_info,
                 data_type,
                 segments_data,
                 default_dirs,
                 velmodel=velmodel)
    ml.close_log(logger)
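
A sketch of a forward run, assuming Solucion.txt, segments_data.json and velmodel_data.json already exist in the working directory, as the routine itself requires; the inputs are the same hypothetical dictionaries as above.

# Hypothetical forward prediction from an existing kinematic model file.
forward_modelling(tensor_info, {'tele_body'}, default_dirs,
                  option='Solucion.txt')
# option='point_source' instead builds an 8 x 1 single-point-source model.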
Example #5
def gf_retrieve(used_data_type, default_dirs):
    """Compute and store in binary files Green functions for each station, both
    for teleseismic body waves, as for strong motion, cGPS and static data.
    
    :param used_data_type: list with data types used in modelling
    :param default_dirs: dictionary with default directories to be used
    :type used_data_type: list
    :type default_dirs: dict
    """
    green_fun_tele = default_dirs['tele_gf']
    green_fun_str = default_dirs['strong_motion_gf']
    green_fun_gps = default_dirs['gps_gf']
    
    processes = []
    loggers = []
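    # Console handler surfaces only errors; full output goes to the log files.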
    ch = logging.StreamHandler()
    formatter = logging.Formatter('%(levelname)s - %(message)s')
    ch.setFormatter(formatter)
    ch.setLevel(logging.ERROR)
    
    if 'tele_body' in used_data_type:
        logger1 = ml.create_log('body_wave_GF',
                                os.path.join('logs', 'green_tele_log'))
        logger1.addHandler(ch)
        p1 = subprocess.Popen([green_fun_tele], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        processes.append(p1)
        loggers.append(logger1)
    if 'strong_motion' in used_data_type:
        logger2 = ml.create_log('get_strong_motion_GF',
                                os.path.join('logs', 'green_str_log'))
        logger2.addHandler(ch)
        p2 = subprocess.Popen([green_fun_str], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        processes.append(p2)
        loggers.append(logger2)
    if 'cgps' in used_data_type:
        logger3 = ml.create_log('get_cgps_GF',
                                os.path.join('logs', 'green_cgps_log'))
        logger3.addHandler(ch)
        p3 = subprocess.Popen([green_fun_str, 'cgps'], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        processes.append(p3)
        loggers.append(logger3)
    if 'gps' in used_data_type:
        logger4 = ml.create_log('GPS_GF',
                                os.path.join('logs', 'green_gps_log'))
        logger4.addHandler(ch)
        p4 = subprocess.Popen([green_fun_gps], stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)
        processes.append(p4)
        loggers.append(logger4)

    for p, log in zip(processes, loggers):
        out, err = p.communicate(timeout=20 * 60)
        log.info(out.decode('utf-8'))
        if err:
            log.error(err.decode('utf-8', 'ignore'))
        ml.close_log(log)
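
A sketch of retrieving Green's functions for two data types; the executable paths are placeholders for wherever the compiled GF programs live, and the keys shown are exactly the ones gf_retrieve reads above.

# Hypothetical executable paths; the keys match those read above.
default_dirs = {
    'tele_gf': '/opt/ffm/bin/green_tele',
    'strong_motion_gf': '/opt/ffm/bin/green_strong',
    'gps_gf': '/opt/ffm/bin/green_gps',
}
gf_retrieve(['tele_body', 'gps'], default_dirs)
# Each subprocess gets up to 20 minutes (the communicate timeout) to finish.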