Example #1
import pandas as pd

def get_combined_data():
    # read the train and test data
    train = read_training_data()
    test = read_test_data()

    # extract and then remove the targets from the training data
    targets = train.Survived
    train.drop('Survived', axis=1, inplace=True)

    # merge train and test data for future feature engineering
    # (DataFrame.append was removed in pandas 2.0, so pd.concat is used)
    combined = pd.concat([train, test])
    combined.reset_index(drop=True, inplace=True)

    return combined
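The point of the merge is to engineer features over train and test together; the targets are extracted first so the combined frame can later be split back apart. A minimal sketch of the recovery step, assuming the extracted targets are kept around (the helper name recover_train_test is hypothetical, not part of the original code):

import pandas as pd

def recover_train_test(combined, targets):
    # hypothetical helper: undo the merge performed by get_combined_data,
    # using the number of training targets to find the split point
    n_train = len(targets)
    train = combined.iloc[:n_train].copy()
    test = combined.iloc[n_train:].copy()
    train['Survived'] = targets.values
    return train, test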
Example #2
def predict(self):
    # run the trained model on the test set read from disk
    X_test = read_data.read_test_data()
    return self.model.predict(X_test, verbose=1)
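For this method to work, read_data.read_test_data only needs to return an array shaped like the model's input. A minimal sketch of such a loader, assuming image-like features stored as a NumPy file (the file path and the [0, 1] scaling are assumptions, not part of the original module):

import numpy as np

def read_test_data():
    # hypothetical loader: file name and scaling are assumptions
    X_test = np.load('data/test_features.npy')
    return X_test.astype('float32') / 255.0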
Example #3
    sp_train = params.sp_train
    sp_cont = params.sp_cont

    ap_train = params.ap_train
    ap_cont = params.ap_cont

    # load (or regenerate) the cached training data for this model
    spectral_data, aperiodic_data, label_data, cutoff_points = read_training_data(
        model_name, load=load_data)

    singing_model = SingingModel(spectral_data, aperiodic_data, label_data,
                                 cutoff_points, model_name)

    # optionally train (or continue training) each sub-model
    if sp_train:
        singing_model.train_model(SPECTRAL_MODE, sp_cont)

    if ap_train:
        singing_model.train_model(APERIODIC_MODE, ap_cont)

    # run inference on the test labels; the aperiodic pass is
    # conditioned on the spectral output
    label_data, frequency = read_test_data(model_name)

    spectral_output = singing_model.inference(label_data, SPECTRAL_MODE)

    aperiodic_output = singing_model.inference(label_data, APERIODIC_MODE,
                                               spectral_output)

    # decode the predicted envelopes and synthesize the audio
    spectral_output, aperiodic_output = decode_envelopes(
        spectral_output, aperiodic_output, params.sample_rate, model_name)

    construct_audio(spectral_output, aperiodic_output, frequency, output_name)
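The sp_*/ap_* flags let the spectral and aperiodic sub-models be trained or resumed independently. A sketch of how such a params object could be built with argparse (the flag names mirror the attributes used above; the defaults and help strings are assumptions):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--sp_train', action='store_true',
                    help='train the spectral model')
parser.add_argument('--sp_cont', action='store_true',
                    help='continue spectral training from a checkpoint')
parser.add_argument('--ap_train', action='store_true',
                    help='train the aperiodic model')
parser.add_argument('--ap_cont', action='store_true',
                    help='continue aperiodic training from a checkpoint')
parser.add_argument('--sample_rate', type=int, default=32000)
params = parser.parse_args()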
Example #4
def test_performance(model):
    # evaluate a trained model on the held-out test set
    test_x, test_y = read_test_data()

    test_pred = model.predict(test_x, batch_size=512, verbose=1)
    evals = get_evals(test_y, test_pred)
    return evals
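get_evals is defined elsewhere; for a binary classifier it might compute standard metrics from the predicted probabilities. A hypothetical version using scikit-learn (the 0.5 threshold and the metric choices are assumptions):

import numpy as np
from sklearn.metrics import accuracy_score, roc_auc_score

def get_evals(y_true, y_pred):
    # hypothetical implementation: y_pred holds probabilities
    y_pred = np.ravel(y_pred)
    y_label = (y_pred > 0.5).astype(int)
    return {
        'accuracy': accuracy_score(y_true, y_label),
        'auc': roc_auc_score(y_true, y_pred),
    }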
Example #5
import tensorflow as tf
import read_data

x_train, y_train = read_data.read_train_data()
x_test, y_test = read_data.read_test_data()
num, H, W, _ = x_train.shape

model = tf.keras.models.Sequential([
    tf.keras.layers.Conv2D(filters=16,
                           kernel_size=3,
                           strides=(1, 1),
                           padding='valid',
                           data_format='channels_last',
                           activation='relu',
                           use_bias=True,
                           input_shape=(H, W, 1)),
    tf.keras.layers.Conv2D(filters=32,
                           kernel_size=3,
                           strides=(1, 1),
                           padding='same',
                           data_format='channels_last',
                           activation='relu',
                           use_bias=True,
                           input_shape=(H - 2, W - 2, 16)),
    tf.keras.layers.Conv2D(filters=64,
                           kernel_size=3,
                           strides=(1, 1),
                           padding='same',
                           data_format='channels_last',
                           activation='relu',
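The snippet is cut off inside the third Conv2D layer. Once the model is completed (typically with a Flatten and a Dense softmax head), it is compiled and trained on the loaded data; a minimal sketch, assuming y_train holds integer class labels (the optimizer, batch size, and epoch count are free choices, not from the original):

# sketch: compile and fit the model defined above
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x_train, y_train,
          batch_size=64, epochs=5,
          validation_data=(x_test, y_test))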
Example #6
if __name__ == "__main__":
    TRAIN_DATA, LEMMA2SENSES, LEMMA2INT = read_data.read_train_data(
        read_data.read_x("ALL.data.xml")[0],
        read_data.read_y("ALL.gold.key.bnids.txt"), True)
    # max number of senses among all target words
    MAX_NB_SENSES = max(len(senses) for senses in LEMMA2SENSES.values())
    MAX_NB_TARGETS = len(LEMMA2SENSES)  # how many target words

    # load word embeddings initialized by init_emb (run init_emb first if this file is missing)
    with open('pretrained_vectors/needed.pkl', 'rb') as f:
        WORD_VECTORS = pickle.load(f)
    # add a random vector used for dropped words
    WORD_VECTORS["_drop_"] = np.random.uniform(-0.1, 0.1, 300)

    NB_EPOCHS = 100  # number of epochs to train
    x_val, y_val, _ = read_data.read_test_data(
        LEMMA2INT, LEMMA2SENSES, WORD_VECTORS)  # read validation data
    """train models"""
    tf.reset_default_graph()
    train_model = models.Model2(MAX_NB_SENSES, 32, MAX_NB_TARGETS)
    val_model = models.Model2(MAX_NB_SENSES,
                              32,
                              MAX_NB_TARGETS,
                              is_training=False)
    print("train models created")
    """run train models"""
    init = tf.global_variables_initializer()
    sess = tf.Session()
    sess.run(init)
    VAL_ACC_LIST = []
    LOSS_LIST = []
    TRAIN_ACC_LIST = []
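    The three lists at the end collect per-epoch statistics, which a TF1 training loop would populate inside the session. A rough sketch of such a loop, assuming Model2 exposes train_op, loss, accuracy, and x/y placeholder attributes and that TRAIN_DATA is an (inputs, labels) pair (all of these names and shapes are assumptions about code not shown here):

    for epoch in range(NB_EPOCHS):
        # hypothetical attributes: train_op, loss, accuracy, x, y
        _, loss, acc = sess.run(
            [train_model.train_op, train_model.loss, train_model.accuracy],
            feed_dict={train_model.x: TRAIN_DATA[0],
                       train_model.y: TRAIN_DATA[1]})
        val_acc = sess.run(val_model.accuracy,
                           feed_dict={val_model.x: x_val, val_model.y: y_val})
        LOSS_LIST.append(loss)
        TRAIN_ACC_LIST.append(acc)
        VAL_ACC_LIST.append(val_acc)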