# Plot the training loss curve and the prediction history snapshots of a model.
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import argparse

from seisnn.core import Feature
from seisnn.utils import get_config
from seisnn.io import read_dataset
from seisnn.plot import plot_loss
from seisnn.example_proto import batch_iterator

ap = argparse.ArgumentParser()
ap.add_argument('-m', '--model', required=True, help='model', type=str)
args = ap.parse_args()

config = get_config()
SAVE_MODEL_PATH = os.path.join(config['MODELS_ROOT'], args.model)
SAVE_HISTORY_PATH = os.path.join(SAVE_MODEL_PATH, 'history')
SAVE_PNG_PATH = os.path.join(SAVE_MODEL_PATH, 'png')

# Plot the loss log written during training.
loss_log = os.path.join(SAVE_MODEL_PATH, f'{args.model}.log')
plot_loss(loss_log, SAVE_MODEL_PATH)

# Plot every example stored in the model's history dataset.
dataset = read_dataset(SAVE_HISTORY_PATH)
for batch in dataset.batch(2):
    for example in batch_iterator(batch):
        feature = Feature(example)
        feature.plot(title=feature.id, save_dir=SAVE_PNG_PATH)
        print(feature.id)
# Fine-tune from a pre-trained model: copy its checkpoints into a new model
# directory before training on the given dataset.
import argparse
import glob
import os
import shutil

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import tensorflow as tf

from seisnn.io import read_dataset
from seisnn.utils import get_config, make_dirs

ap = argparse.ArgumentParser()
ap.add_argument('-d', '--dataset', required=True, help='dataset', type=str)
ap.add_argument('--pre_train', required=True, help='pre-train model', type=str)
ap.add_argument('-m', '--model', required=True, help='save model', type=str)
args = ap.parse_args()

config = get_config()
SAVE_MODEL_PATH = os.path.join(config['MODELS_ROOT'], args.model)
make_dirs(SAVE_MODEL_PATH)
SAVE_HISTORY_PATH = os.path.join(SAVE_MODEL_PATH, "history")
make_dirs(SAVE_HISTORY_PATH)

dataset_dir = os.path.join(config['DATASET_ROOT'], args.dataset)
dataset = read_dataset(dataset_dir).skip(1000)

# Hold out one example (first channel only) for validation snapshots.
val = next(iter(dataset.batch(1)))
val_trace = val['trace'][:, :, :, 0, tf.newaxis]
val_pdf = val['pdf'][:, :, :, 0, tf.newaxis]

# `model` and `optimizer` come from the script's model setup (omitted here).
ckpt = tf.train.Checkpoint(model=model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, SAVE_MODEL_PATH, max_to_keep=100)

# Seed the new model directory with the pre-trained checkpoints.
if args.pre_train:
    PRE_TRAIN_PATH = os.path.join(config['MODELS_ROOT'], args.pre_train)
    for file in glob.glob(os.path.join(PRE_TRAIN_PATH, 'ckpt*')):
        shutil.copy2(file, SAVE_MODEL_PATH)
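# A minimal sketch of the next step, assuming the copied files include the
# checkpoint state file: restore the pre-trained weights into `model` before
# the fine-tuning loop starts (the prediction script below uses the same
# restore pattern through its CheckpointManager).
latest = tf.train.latest_checkpoint(SAVE_MODEL_PATH)
if latest:
    ckpt.restore(latest)
    print(f'Pre-trained weights restored from {latest}')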
# Predict phase probability functions with a trained model and stack the
# prediction onto each example as an extra pdf channel.
import argparse
import os

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import tensorflow as tf

from seisnn.io import read_dataset
from seisnn.utils import get_config, make_dirs

ap = argparse.ArgumentParser()
ap.add_argument('-i', '--input', required=True, help='input dataset', type=str)
ap.add_argument('-o', '--output', required=True, help='output dataset', type=str)
ap.add_argument('-m', '--model', required=True, help='model', type=str)
args = ap.parse_args()

config = get_config()
MODEL_PATH = os.path.join(config['MODELS_ROOT'], args.model)
make_dirs(MODEL_PATH)
OUTPUT_DATASET = os.path.join(config['DATASET_ROOT'], args.output)
make_dirs(OUTPUT_DATASET)
INPUT_DATASET = os.path.join(config['DATASET_ROOT'], args.input)
dataset = read_dataset(INPUT_DATASET)

# `model` and `optimizer` come from the script's model setup (omitted here).
ckpt = tf.train.Checkpoint(model=model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, MODEL_PATH, max_to_keep=100)
if ckpt_manager.latest_checkpoint:
    ckpt.restore(ckpt_manager.latest_checkpoint)
    last_epoch = len(ckpt_manager.checkpoints)
    print(f'Latest checkpoint epoch {last_epoch} restored!!')

n = 0
for batch in dataset.take(1000).batch(512).prefetch(2):
    # Append the model prediction as an additional channel of the pdf tensor.
    pdf = model.predict(batch['trace'])
    batch['pdf'] = tf.concat([batch['pdf'], pdf], axis=3)
    phase = batch['phase'].to_list()
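    # Minimal sketch, assuming the Feature and batch_iterator helpers imported
    # in the plotting scripts: walk the batch example by example (each one now
    # carrying the extra predicted pdf channel), e.g. for quick inspection
    # before the examples are written out to OUTPUT_DATASET.
    for example in batch_iterator(batch):
        feature = Feature(example)
        print(feature.id)
        n += 1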
# Plot examples from a dataset for visual inspection.
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import argparse

from seisnn.utils import get_config
from seisnn.io import read_dataset
from seisnn.core import Feature
from seisnn.example_proto import batch_iterator

ap = argparse.ArgumentParser()
ap.add_argument('-d', '--dataset', required=True, help='dataset', type=str)
args = ap.parse_args()

config = get_config()
dataset_dir = os.path.join(config['DATASET_ROOT'], args.dataset)
dataset = read_dataset(dataset_dir)

for batch in dataset.shuffle(1000).batch(2):
    for example in batch_iterator(batch):
        feature = Feature(example)
        feature.plot(enlarge=True, snr=True)
# Train a model from scratch on a dataset, saving checkpoints and history.
import argparse
import os
import shutil

os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import tensorflow as tf

from seisnn.io import read_dataset
from seisnn.utils import get_config, make_dirs

ap = argparse.ArgumentParser()
ap.add_argument('-d', '--dataset', required=True, help='dataset', type=str)
ap.add_argument('-m', '--model', required=True, help='save model', type=str)
args = ap.parse_args()

config = get_config()
SAVE_MODEL_PATH = os.path.join(config['MODELS_ROOT'], args.model)
# Start from a clean model directory.
shutil.rmtree(SAVE_MODEL_PATH, ignore_errors=True)
make_dirs(SAVE_MODEL_PATH)
SAVE_HISTORY_PATH = os.path.join(SAVE_MODEL_PATH, "history")
make_dirs(SAVE_HISTORY_PATH)

dataset_dir = os.path.join(config['DATASET_ROOT'], args.dataset)
dataset = read_dataset(dataset_dir).shuffle(10000).take(1000)

# Hold out one example for validation snapshots.
val = next(iter(dataset.batch(1)))
val_trace = val['trace']
val_pdf = val['pdf']

# `model` and `optimizer` come from the script's model setup (omitted here).
ckpt = tf.train.Checkpoint(model=model, optimizer=optimizer)
ckpt_manager = tf.train.CheckpointManager(ckpt, SAVE_MODEL_PATH, max_to_keep=100)

EPOCHS = 1
for epoch in range(EPOCHS):
    n = 0
    loss_buffer = []
    for train in dataset.prefetch(100).batch(1):
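        # Hedged sketch of the per-batch step, assuming a tf.GradientTape loop
        # over the first channel of trace/pdf (the loss function here is an
        # assumption, not necessarily the one seisnn uses).
        trace = train['trace'][:, :, :, 0, tf.newaxis]
        pdf = train['pdf'][:, :, :, 0, tf.newaxis]

        with tf.GradientTape() as tape:
            pred = model(trace, training=True)
            loss = tf.reduce_mean(tf.keras.losses.binary_crossentropy(pdf, pred))

        gradients = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(gradients, model.trainable_variables))

        loss_buffer.append(loss.numpy())
        n += 1

    # Save a checkpoint at the end of each epoch (assumed use of ckpt_manager).
    ckpt_manager.save()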
# Compute the signal-to-noise ratio of manual picks in a dataset.
import argparse
import os

import pandas as pd
from obspy import UTCDateTime

from seisnn.core import Feature
from seisnn.utils import get_config
from seisnn.io import read_dataset
from seisnn.qc import signal_to_noise_ratio
from seisnn.plot import plot_snr_distribution

ap = argparse.ArgumentParser()
ap.add_argument('-d', '--dataset', required=True, help='dataset', type=str)
args = ap.parse_args()

config = get_config()
dataset_dir = os.path.join(config['DATASET_ROOT'], args.dataset)
dataset = read_dataset(dataset_dir).shuffle(100000).prefetch(10)

pick_snr = []
n = 0
for example in dataset:
    feature = Feature(example)
    picks = pd.DataFrame.from_dict({
        'pick_time': feature.pick_time,
        'pick_phase': feature.pick_phase,
        'pick_set': feature.pick_set
    })
    # Keep only the manually labelled picks.
    picks = picks.loc[picks['pick_set'] == "manual"]
    for i, p in picks.iterrows():
        # Pick time as seconds from the start of the trace.
        pick_time = UTCDateTime(p['pick_time']) - UTCDateTime(feature.starttime)