def dump_model(file,
               language,
               method,
               seed,
               library,
               sampling_rate,
               normalization,
               shape,
               augmentation,
               params=None,
               extra=None):
    """Write a JSON metadata document describing a model run.

    Parameters
    ----------
    file : str
        Destination path handed to ``JSON.create_json_file``.
    language, method, seed, library, sampling_rate, normalization,
    shape, augmentation :
        Run settings stored verbatim in the output document.
    params : optional
        Hyper-parameters of the run; stored as-is (``None`` if absent).
    extra : dict, optional
        Additional key/value pairs merged into the document; they may
        override the default keys. Defaults to an empty mapping.
    """
    # Local imports match the file's existing convention for these helpers.
    from time import time
    from deep_audio import JSON

    dump_info = {
        'method': method,
        'language': language,
        'normalization': normalization,
        'seed': seed,
        'augmentation': augmentation,
        'library': library,
        'sample_rate': sampling_rate,
        'shape': shape,
        'timestamp': time(),
        'params': params,
        # `extra=None` replaces the original mutable default `{}`, which
        # would be shared across every call to this function.
        **(extra or {}),
    }

    JSON.create_json_file(file, dump_info, cls=JSON.NumpyEncoder)
def dump_grid(file,
              model,
              language,
              method,
              seed,
              library,
              sizes,
              score_train,
              score_test,
              sampling_rate,
              normalization,
              shape,
              augmentation,
              score_valid=None,
              model_file=None,
              extra=None):
    """Write a JSON report for a grid-searched estimator.

    Parameters
    ----------
    file : str
        Destination path handed to ``JSON.create_json_file``.
    model :
        Fitted search object exposing ``best_params_`` and ``cv_results_``
        (e.g. an sklearn ``GridSearchCV``).
    sizes :
        Split sizes (e.g. ``[len(train), len(valid), len(test)]``).
    score_train, score_test :
        Scores on the train/test splits, stored verbatim.
    score_valid : optional
        Validation score; included only when provided.
    model_file : str, optional
        Path of the serialized model; included only when provided.
    extra : dict, optional
        Additional key/value pairs merged into the document.
    """
    from time import time
    from deep_audio import JSON

    dump_info = {
        'method': method,
        'language': language,
        'normalization': normalization,
        'seed': seed,
        'augmentation': augmentation,
        'library': library,
        'sample_rate': sampling_rate,
        'shape': shape,
        'sizes': sizes,
        'score_train': score_train,
        'score_test': score_test,
        'timestamp': time(),
        'params': model.best_params_,
        'cv_results': model.cv_results_,
        # `extra=None` replaces the original shared mutable default `{}`.
        **(extra or {}),
    }

    # Explicit None check: the original truthiness test silently dropped
    # a legitimate validation score of 0 / 0.0.
    if score_valid is not None:
        dump_info['score_valid'] = score_valid

    # Bug fix: the original tested `model`, which is always truthy here
    # (model.best_params_ was already accessed above), so the key was
    # always written — even as None. The intended guard is `model_file`.
    if model_file is not None:
        dump_info['model_file'] = model_file

    JSON.create_json_file(file, dump_info, cls=JSON.NumpyEncoder)
# ---- Example #3 (scraped-snippet separator) ----
def object_to_json(filename, attrs, files):
    """Aggregate per-file attrs/labels/classes into a single JSON file.

    Parameters
    ----------
    filename : str
        Output path for the JSON document.
    attrs : iterable of dict
        Each entry must provide 'attrs', 'labels' and 'classes'
        sequences; they are concatenated in iteration order.
    files : iterable of str
        Source file names; '.wav' substrings are stripped for 'mapping'.
    """
    from deep_audio import JSON

    data = {
        # The original used enumerate() but discarded the index, and
        # shadowed the name `file`; iterate the names directly.
        'mapping': [name.replace('.wav', '') for name in files],
        'classes': [],
        'labels': [],
        'attrs': [],
    }

    for entry in attrs:
        data['attrs'].extend(entry['attrs'])
        data['labels'].extend(entry['labels'])
        data['classes'].extend(entry['classes'])

    JSON.create_json_file(filename, data, cls=JSON.NumpyEncoder)
# ---- Example #4 (scraped-snippet separator) ----
# %%
# Build the model with a fixed learning rate.
learning_rate = 0.0001
model = build_model(learning_rate=learning_rate)

# %%
# Save the model structure to disk.

# Unix timestamp used to namespace this run's output directory.
timestamp = int(time.time())

Directory.create_directory(
    f'{language}/models/{model_algo}/{library}/{filename_ps}{timestamp}')

# Persist the architecture only (not the weights) as JSON.
JSON.create_json_file(
    f'{language}/models/{model_algo}/{library}/{filename_ps}{timestamp}/model_structure.json',
    model.to_json())

model_save_filename = f'{language}/models/{model_algo}/{library}/{filename_ps}{timestamp}/model_weight.h5'

# Early stopping: halt after 300 epochs without improvement and
# restore the best weights seen during training.
earlystopping_cb = EarlyStopping(patience=300, restore_best_weights=True)

# Checkpoint: keep only the weights with the best validation accuracy.
mdlcheckpoint_cb = ModelCheckpoint(model_save_filename,
                                   monitor="val_accuracy",
                                   save_best_only=True)

# %%
# TREINA O MODELO
history = model.fit(X_train,
# ---- Example #5 (scraped-snippet separator) ----
# Score on the held-out test split.
score_test = model.score(X_test, y_test)

# Score on the training split (for over/under-fitting comparison).
score_train = model.score(X_train, y_train)

# Test-split predictions — presumably consumed further below, not
# visible in this fragment.
y_hat = model.predict(X_test)

# Output path encodes method/language/library/normalization/score.
filename_model = Directory.model_filename(method,
                                          language,
                                          library,
                                          normalization,
                                          score_test,
                                          augmentation=augment,
                                          json=False) + 'model.json'

# Persist the architecture of a freshly built model; 'build_' + method
# must name a module-level factory function.
JSON.create_json_file(file=filename_model,
                      data=globals()['build_' + method]().to_json())

# SALVA ACURÁCIAS E PARAMETROS
Model.dump_grid(Directory.model_filename(method,
                                         language,
                                         library,
                                         normalization,
                                         score_test,
                                         augmentation=augment),
                model=model,
                language=language,
                method=method,
                normalization=normalization,
                sampling_rate=sampling_rate,
                augmentation=augment,
                shape=X_train.shape,
# ---- Example #6 (scraped-snippet separator) ----
# Fit the (grid-search) estimator on the training split.
model.fit(X_train, y_train)

# Best hyper-parameters found by the search.
best_params = model.best_params_

score_test = model.score(X_test, y_test)

score_train = model.score(X_train, y_train)

y_hat = model.predict(X_test)

# Path for the serialized CNN architecture.
filename_model = Directory.model_filename(
    'cnn', language, library, normalization, score_test, json=False)+'model.json'

# Save the architecture of a freshly built (untrained) CNN as JSON.
JSON.create_json_file(
    file=filename_model,
    data=build_model().to_json()
)

# SALVA ACURÁCIAS E PARAMETROS
Model.dump_grid(
    Directory.model_filename(
        'cnn', language, library, normalization, score_test),
    model=model,
    language=language,
    method='CNN',
    normalization=normalization,
    sampling_rate=sampling_rate,
    seed=random_state,
    library=library,
    sizes=[len(X_train), len(X_valid), len(X_test)],
    score_train=score_train,
# ---- Example #7 (scraped-snippet separator) ----
                                           augmentation=augment,
                                           json=False,
                                           models=True)

model_save_filename = filename_holder + 'weight.h5'

# Save weights: checkpoint keeps only the weights with the best
# training accuracy (monitor="accuracy", not validation accuracy).
mdlcheckpoint_cb = keras.callbacks.ModelCheckpoint(model_save_filename,
                                                   monitor="accuracy",
                                                   save_best_only=True)

# history = model.fit(X_train, y_train, epochs=epochs,
#                     batch_size=batch_size, callbacks=[mdlcheckpoint_cb])

# Save the model structure: architecture of a freshly built model;
# 'build_' + method must name a module-level factory function.
JSON.create_json_file(file=filename_holder + 'model.json',
                      data=globals()['build_' + method]().to_json())

# Persist the fitted scaler for inference-time preprocessing.
# NOTE(review): the file handle is never closed — consider a with-block.
dump(scaler, open(filename_holder + 'scaler.pkl', 'wb'))

# SALVA OS PARAMETROS
Model.dump_model(filename_holder + 'info.json',
                 params=param_grid,
                 language=language,
                 method=method,
                 normalization=normalization,
                 sampling_rate=sampling_rate,
                 augmentation=augment,
                 shape=X_train.shape,
                 seed=random_state,
                 library=library,
                 extra={