import argparse

import keras
import numpy as np
import os
import pandas as pd
import pescador
import tensorflow as tf
import time

import oyaml as yaml

import localmodule
from models import create_flat_singletask_coarse_model
from keras.optimizers import Adam


# Define constants.
train_data_dir = localmodule.get_train_data_dir()
train_dataset_name = localmodule.get_train_dataset_name()
valid_data_dir = localmodule.get_valid_data_dir()
valid_dataset_name = localmodule.get_valid_dataset_name()
models_dir = localmodule.get_models_dir()
n_input_hops = 104
n_filters = [24, 48, 48]
kernel_size = [5, 5]
pool_size = [2, 4]
n_hidden_units = 64


# Read command-line arguments.
# (`import argparse` was missing above even though the parser is used here.)
parser = argparse.ArgumentParser()
parser.add_argument('aug_kind_str')
parser.add_argument('trial_str')
parser.add_argument('--lr', type=float, default=1e-4)
parser.add_argument('--base-wd', type=float, default=1e-4)
parser.add_argument('--batch-size', type=int, default=128)
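# Sketch (assumption, not part of the original file): downstream, the script
# would presumably parse these arguments and feed them into training, along
# these lines. The names `args` and `optimizer` are illustrative; `lr` is the
# learning-rate keyword used by the Keras version imported above.
#
#   args = parser.parse_args()
#   aug_kind_str = args.aug_kind_str
#   trial_str = args.trial_str
#   optimizer = Adam(lr=args.lr)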
min_valid_idxs = valid_idxs
min_valid_distr = valid_distr
min_train_distr = train_distr


# Get list of train and validation "sources".
train_sources = []
valid_sources = []
for idx, source in enumerate(source_name_list):
    if idx in min_valid_idxs:
        valid_sources.append(source)
    else:
        train_sources.append(source)


# Copy train and validation data.
train_dir = os.path.join(localmodule.get_train_data_dir(), "original")
valid_dir = os.path.join(localmodule.get_valid_data_dir(), "original")
for root, dirs, files in os.walk(anafcc_dir):
    for fname in files:
        # Only consider HDF5 files.
        if not (fname.endswith('.hdf5') or fname.endswith('.h5')):
            continue
        for source in train_sources:
            if fname.startswith(source):
                src_path = os.path.join(root, fname)
                # os.path.relpath avoids the leading separator left by
                # str.replace, which would make os.path.join drop train_dir.
                rel_path = os.path.relpath(src_path, anafcc_dir)
                dst_path = os.path.join(train_dir, rel_path)
                os.makedirs(os.path.dirname(dst_path), exist_ok=True)
                shutil.copy(src_path, dst_path)
        for source in valid_sources:
            if fname.startswith(source):
                src_path = os.path.join(root, fname)
                rel_path = os.path.relpath(src_path, anafcc_dir)
                dst_path = os.path.join(valid_dir, rel_path)
                os.makedirs(os.path.dirname(dst_path), exist_ok=True)
                shutil.copy(src_path, dst_path)
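# Sketch (assumption, not part of the original file): a quick sanity check
# that the split produced files on both sides. `count_hdf5` is a hypothetical
# helper introduced here for illustration only.
#
#   def count_hdf5(top):
#       return sum(
#           1 for _, _, fnames in os.walk(top)
#           for f in fnames if f.endswith(('.hdf5', '.h5')))
#
#   print("train:", count_hdf5(train_dir), "valid:", count_hdf5(valid_dir))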