Example #1
def load_models(models_root):
    """Load labels.json and every *.model file found in models_root into the
    module-level `models` dict and `labels` object."""
    global models
    global labels

    model_dir = models_root
    print("\nDIR: " + model_dir)

    label_file = os.path.join(model_dir, "labels.json")
    print("Loading labels: " + str(label_file))
    with open(label_file) as fp:
        labels = json.load(fp)
        print("Labels: " + str(labels))
    print("Labels loaded.")

    for file in os.listdir(model_dir):
        if file.endswith(".model"):
            model_name = file[:-6]  # strip the ".model" extension
            print("\nModel name: " + model_name)

            file = os.path.join(model_dir, file)

            print("Loading model and weights: " + file)
            model = load_keras_model(file)
            print("Model loaded.")

            models[model_name] = model
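Most snippets on this page call load_keras_model, the usual alias for Keras's own loader (Example #4 spells the import out: from keras.models import load_model as load_keras_model). A rough sketch of the module-level setup Example #1 appears to assume; the dict and label names come from the globals the function declares, everything else is an assumption:

import os
import json
from keras.models import load_model as load_keras_model

models = {}    # model_name -> loaded Keras model, filled in by load_models()
labels = None  # contents of labels.json, filled in by load_models()

# load_models("./models") would then populate both globals.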
Example #2
def load_model():
    # load the pre-trained Keras model (here we are using a model
    # pre-trained on ImageNet and provided by Keras, but you can
    # substitute in your own networks just as easily)
    global graph
    graph = tf.get_default_graph()
    global model
    print(f'Loading model {modelFile}')
    model = load_keras_model(modelFile)
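Example #2 captures the default TensorFlow graph at load time, which only makes sense in the TF1-era Flask/Keras serving pattern. A minimal sketch of the companion prediction call under that assumption (the predict wrapper below is hypothetical, not part of the snippet):

def predict(image_batch):
    # Re-enter the graph that was current when the model was loaded;
    # otherwise Keras raises "Tensor ... is not an element of this graph"
    # when predict() runs on a different thread.
    with graph.as_default():
        return model.predict(image_batch)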
Example #3
def load_model(game_name, ex_it_algorithm, iteration):
    path = "./Trained_models/" + game_name + "/" + ex_it_algorithm.__name__ + "/" + str(iteration) + ".h5"
    # Hard-coding the custom loss.
    custom_objects = None
    if ex_it_algorithm.apprentice.use_custom_loss:
        from ExIt.Apprentice.Nn import custom_loss
        custom_objects = {'custom_loss': custom_loss}

    return load_keras_model(path, custom_objects=custom_objects)
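Keras cannot rebuild a model that was compiled with a custom loss unless the callable is handed back under the name it was saved with, which is why Example #3 threads custom_objects through. A purely illustrative sketch; the loss body is a placeholder, not the project's actual custom_loss:

import keras.backend as K
from keras.models import load_model as load_keras_model

def custom_loss(y_true, y_pred):
    # placeholder body; only the name has to match what was saved
    return K.mean(K.square(y_pred - y_true), axis=-1)

model = load_keras_model("model.h5", custom_objects={"custom_loss": custom_loss})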
Example #4
def _load_keras(model_subpath, special_classes):
    '''Loads a keras model from a context subdirectory'''
    from keras.models import load_model as load_keras_model

    context = get_context()
    model_path = context.build_path(
        model_subpath)  # /different/path/to/context/root/abc123/model.h5
    model = load_keras_model(model_path)
    return model
Example #5
File: model.py Project: cpgaffney1/cs231n
def load_model(name):
    '''
    Loads saved model and config object
    :param name: Name of model saved from a previous training run
    :return: keras model object and config object
    '''
    path = 'models/' + name + '/'
    model = load_keras_model(path + 'model')
    config = load_config(path)
    return model, config
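load_config in Example #5 is project-specific and not shown. A plausible, purely hypothetical counterpart would read a config file saved next to the weights:

import json

def load_config(path):
    # hypothetical: assumes the training run wrote models/<name>/config.json
    with open(path + 'config.json') as fp:
        return json.load(fp)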
Example #6
def load_model(filename):
    """
    Loads the specified Keras model from a file.

    Parameters
    ----------
    filename : string
        The name of the file to read from

    Returns
    -------
    Keras model
        The Keras model loaded from a file
    """

    return load_keras_model(__construct_path(filename))
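__construct_path is likewise project-specific. Judging from Example #7 below, which inlines a path of the same shape, a plausible (hypothetical) implementation would be nothing more than:

import os

def __construct_path(filename):
    # hypothetical helper: map a model name to models/<name>.h5
    return os.path.join("models", filename + ".h5")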
Example #7
def load_model(filename):
    """
    Loads the specified Keras model from a file.

    Parameters
    ----------
    filename : string
        The name of the file to read from

    Returns
    -------
    Keras model
        The Keras model loaded from a file
    """

    return load_keras_model(join("models", filename + ".h5"))
Example #8
File: utils.py Project: tgandor/YAD2K
def load_model(model_path, classes_path=None, anchors_path=None):
    model_path = os.path.expanduser(model_path)
    assert model_path.endswith('.h5'), 'Keras model must be a .h5 file.'

    if not os.path.isfile(model_path):
        raise ValueError('Model file: {} - not found'.format(model_path))

    h5_suffix = re.compile(r'\.h5$')

    if anchors_path is None:
        anchors_path = h5_suffix.sub('_anchors.txt', model_path)
    else:
        anchors_path = os.path.expanduser(anchors_path)

    if not os.path.isfile(anchors_path):
        raise ValueError('Anchors file: {} - not found'.format(anchors_path))

    if classes_path is None:
        classes_path = h5_suffix.sub('_classes.txt', model_path)
    else:
        classes_path = os.path.expanduser(classes_path)

    if not os.path.isfile(classes_path):
        raise ValueError('Classes file: {} - not found'.format(classes_path))

    with open(classes_path) as f:
        class_names = f.readlines()
    class_names = [c.strip() for c in class_names]

    with open(anchors_path) as f:
        anchors = f.readline()
        anchors = [float(x) for x in anchors.split(',')]
        anchors = np.array(anchors).reshape(-1, 2)

    yolo_model = load_keras_model(model_path, compile=False)

    # Verify model, anchors, and classes are compatible
    num_classes = len(class_names)
    num_anchors = len(anchors)
    # TODO: Assumes dim ordering is channel last
    model_output_channels = yolo_model.layers[-1].output_shape[-1]
    assert model_output_channels == num_anchors * (num_classes + 5), (
        'Output channels ({}) != anchors ({}) * {} ({} classes + 5 params)'.
        format(model_output_channels, num_anchors, num_classes + 5,
               num_classes))

    return yolo_model, class_names, anchors
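Example #8 enforces a sibling-file convention: given foo.h5, it expects foo_anchors.txt and foo_classes.txt in the same directory unless explicit paths are passed. A hedged usage sketch (the paths are hypothetical):

yolo_model, class_names, anchors = load_model("model_data/yolo.h5")
# looks for model_data/yolo_anchors.txt and model_data/yolo_classes.txt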
Example #9
def load_ae_model(model_file):
    return load_keras_model(model_file,
                            custom_objects={
                                "Dense_tied": Dense_tied,
                                "KCompetitive": KCompetitive
                            })
Example #10
def load_model():
    """ Load saved model named 'model.h5'
    Note that you need to have the model.h5 somewhere in current directory
    return: instance of ClaimClassifier with model
    """
    return ClaimClassifier(load_keras_model('model.h5'))
Example #11
def load_model():
    if os.path.exists(model_filename):
        return load_keras_model(model_filename)
    else:
        print("File {} not found!".format(model_filename))
        exit()
Example #12
def load_model():
    global model
    model = load_keras_model(choose_model())
    model._make_predict_function()
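_make_predict_function() in Example #12 is a private Keras 2.x call that builds the prediction function eagerly so the model can later be called from a different thread (typical in web servers). A rough equivalent, assuming the model's input shape is fully defined, is a throwaway warm-up prediction:

import numpy as np

# hypothetical warm-up: forces Keras to build its predict function now
model.predict(np.zeros((1,) + model.input_shape[1:]))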
Example #13
def init():
    local_path = '.azureml/share/model.h5'

    global model
    model = load_keras_model(local_path)
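Example #13 is the init() half of an Azure ML scoring script; the service calls it once per container and then routes requests to a run() function. A minimal hedged sketch of that counterpart (the JSON payload format is an assumption):

import json
import numpy as np

def run(raw_data):
    # assumed request format: {"data": [[...], [...]]}
    data = np.array(json.loads(raw_data)["data"])
    predictions = model.predict(data)
    return predictions.tolist()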
Example #14
    def __init__(self, style_transfer_model_name):
        self.path = f'{CLASSIFIER_BASE_PATH}/neural_{style_transfer_model_name}.h5'
        self.classifier = load_keras_model(self.path)
Example #15
    arg('--embeddings', help='Distributional model', required=True)
    args = parser.parse_args()

    model_filename = args.modelfile
    embeddings_file = args.embeddings

    logger.info('Loading the distributional model...')
    emb_model = load_model(embeddings_file)
    logger.info('Finished loading the distributional model')
    vocabulary = emb_model.vocab
    embedding_layer = emb_model.get_keras_embedding()

    classes = ['0', '1', '2']

    max_seq_length = 20  # Padding: bring every document to this length (truncate the excess, pad the rest with zeros)

    # Load the trained model
    print('Loading the trained model')
    model = load_keras_model(model_filename)
    print(model.summary())

    while True:
        text = input('Enter your text: ')
        x = [[get_number(w, vocab=vocabulary) for w in text.split()]]
        vectorized = preprocessing.sequence.pad_sequences(
            x, maxlen=max_seq_length, truncating='post', padding='post')
        pred = model.predict(vectorized)
        print(pred)
        cl = [classes[np.argmax(pr)] for pr in pred]
        print(cl)
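get_number in Example #15 is project-specific; it evidently maps a token to its row in the embedding matrix produced by get_keras_embedding(). A plausible, hypothetical version for a pre-4.0 gensim vocabulary:

def get_number(word, vocab):
    # hypothetical: fall back to index 0 for out-of-vocabulary tokens
    return vocab[word].index if word in vocab else 0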
Example #16
repository = 'models/'

fh_m = [f for f in os.listdir(repository)]
for f in fh_m:
    if f.startswith(lang) and f.endswith('.h5'):
        model_file = f
    if f.startswith(lang) and f.endswith('.map'):
        w2i = pickle.load(open(repository + f, 'rb'))
print('Loading %s and map ...' % model_file)

maxlen = 1000
batch_size = 64
topk = 10

# logger.info('Load a pre-trained Keras model...')
model = load_keras_model(repository + model_file, custom_objects={"AttentionWeightedAverage": AttentionWeightedAverage})
# print(model.summary())

print('Predicting on:', file=sys.stderr)

mapping = {'A1': 'argument', 'A16': 'info', 'A8': 'news', 'A11': 'personal', 'A17': 'eval', 'A12': 'promotion',
           'A14': 'scitech', 'A9': 'legal', 'A7': 'instruction', 'A4': 'fiction'}

# this is the exact order of the y in the model training setting
ann_order = ['A1', 'A4', 'A7', 'A8', 'A9', 'A11', 'A12', 'A14', 'A16', 'A17']
print('what are we predicting', ann_order, file=sys.stderr)

###########################
### input folder (two-level tree of folders) or file
###########################
rootdir = "/home/u2/resources/corpora/aranea/ref/ru/"
Example #17
def _run(model_file_name, init_function_name, component_type_string,
         target_class, layer_name, neuron_indices, channel_index,
         ideal_activation, num_iterations, learning_rate, output_file_name):
    """Runs backwards optimization on a trained CNN.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param init_function_name: Same.
    :param component_type_string: Same.
    :param target_class: Same.
    :param layer_name: Same.
    :param neuron_indices: Same.
    :param channel_index: Same.
    :param ideal_activation: Same.
    :param num_iterations: Same.
    :param learning_rate: Same.
    :param output_file_name: Same.
    """

    model_interpretation.check_component_type(component_type_string)
    if ideal_activation <= 0:
        ideal_activation = None

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    custom_dict = {'brier_skill_score_keras': _brier_skill_score_keras}
    model_object = load_keras_model(model_file_name, custom_objects=custom_dict)

    init_function = _create_initializer(init_function_name)
    print(SEPARATOR_STRING)

    if component_type_string == CLASS_COMPONENT_TYPE_STRING:
        print('Optimizing image for target class {0:d}...'.format(target_class))

        list_of_optimized_matrices, initial_activation, final_activation = (
            backwards_opt.optimize_input_for_class(
                model_object=model_object, target_class=target_class,
                init_function_or_matrices=init_function,
                num_iterations=num_iterations, learning_rate=learning_rate)
        )

    elif component_type_string == NEURON_COMPONENT_TYPE_STRING:
        print('Optimizing image for neuron {0:s} in layer "{1:s}"...'.format(
            str(neuron_indices), layer_name
        ))

        list_of_optimized_matrices, initial_activation, final_activation = (
            backwards_opt.optimize_input_for_neuron(
                model_object=model_object, layer_name=layer_name,
                neuron_indices=neuron_indices,
                init_function_or_matrices=init_function,
                num_iterations=num_iterations, learning_rate=learning_rate,
                ideal_activation=ideal_activation)
        )

    else:
        print('Optimizing image for channel {0:d} in layer "{1:s}"...'.format(
            channel_index, layer_name))

        list_of_optimized_matrices, initial_activation, final_activation = (
            backwards_opt.optimize_input_for_channel(
                model_object=model_object, layer_name=layer_name,
                channel_index=channel_index,
                init_function_or_matrices=init_function,
                stat_function_for_neuron_activations=K.max,
                num_iterations=num_iterations, learning_rate=learning_rate,
                ideal_activation=ideal_activation)
        )

    print(SEPARATOR_STRING)

    print('Denormalizing optimized examples...')
    list_of_optimized_matrices[0] = _denormalize_data(
        list_of_optimized_matrices[0]
    )

    print('Writing results to: "{0:s}"...'.format(output_file_name))
    backwards_opt.write_standard_file(
        pickle_file_name=output_file_name,
        list_of_optimized_matrices=list_of_optimized_matrices,
        initial_activations=numpy.array([initial_activation]),
        final_activations=numpy.array([final_activation]),
        model_file_name=model_file_name,
        init_function_name_or_matrices=init_function_name,
        num_iterations=num_iterations, learning_rate=learning_rate,
        component_type_string=component_type_string, target_class=target_class,
        layer_name=layer_name, neuron_indices=neuron_indices,
        channel_index=channel_index, ideal_activation=ideal_activation)
Example #18
def load_model(model_path: Path, labels_path: Path) -> Tuple[Sequential, Any]:
    # Lazily load and cache the model and labels in module-level globals.
    if globals()["model"] is None:
        loaded_model = load_keras_model(model_path)
        globals()["model"] = loaded_model
        globals()["labels"] = load_labels(labels_path)
    return globals()["model"], globals()["labels"]
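The globals() lookups in Example #18 assume module-level placeholders, and since the function actually returns a (model, labels) pair, a Tuple annotation needs the typing imports as well. A minimal sketch of that module-level setup (names taken from the snippet, the rest assumed):

from typing import Any, Tuple

model = None
labels = None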
Example #19
def load_vae_model(model_file):
    return load_keras_model(model_file,
                            custom_objects={"KCompetitive": KCompetitive})
Example #20
def _run(model_file_name, init_function_name, component_type_string,
         target_class, layer_name, neuron_indices, channel_index,
         ideal_activation, num_iterations, learning_rate, output_file_name):
    """Runs backwards optimization on a trained CNN.

    This is effectively the main method.

    :param model_file_name: See documentation at top of file.
    :param init_function_name: Same.
    :param component_type_string: Same.
    :param target_class: Same.
    :param layer_name: Same.
    :param neuron_indices: Same.
    :param channel_index: Same.
    :param ideal_activation: Same.
    :param num_iterations: Same.
    :param learning_rate: Same.
    :param output_file_name: Same.
    """

    model_interpretation.check_component_type(component_type_string)
    if ideal_activation <= 0:
        ideal_activation = None

    print('Reading model from: "{0:s}"...'.format(model_file_name))
    model_object = load_keras_model(
        model_file_name,
        custom_objects={'brier_skill_score_keras': _brier_skill_score_keras})

    init_function = _create_initializer(init_function_name)
    print(SEPARATOR_STRING)

    if component_type_string == CLASS_COMPONENT_TYPE_STRING:
        print(
            'Optimizing image for target class {0:d}...'.format(target_class))

        result_dict = backwards_opt.optimize_input_for_class(
            model_object=model_object,
            target_class=target_class,
            init_function_or_matrices=init_function,
            num_iterations=num_iterations,
            learning_rate=learning_rate)

    elif component_type_string == NEURON_COMPONENT_TYPE_STRING:
        print('Optimizing image for neuron {0:s} in layer "{1:s}"...'.format(
            str(neuron_indices), layer_name))

        result_dict = backwards_opt.optimize_input_for_neuron(
            model_object=model_object,
            layer_name=layer_name,
            neuron_indices=neuron_indices,
            init_function_or_matrices=init_function,
            num_iterations=num_iterations,
            learning_rate=learning_rate,
            ideal_activation=ideal_activation)

    else:
        print('Optimizing image for channel {0:d} in layer "{1:s}"...'.format(
            channel_index, layer_name))

        result_dict = backwards_opt.optimize_input_for_channel(
            model_object=model_object,
            layer_name=layer_name,
            channel_index=channel_index,
            init_function_or_matrices=init_function,
            stat_function_for_neuron_activations=K.max,
            num_iterations=num_iterations,
            learning_rate=learning_rate,
            ideal_activation=ideal_activation)

    print(SEPARATOR_STRING)

    initial_activations = numpy.array(
        [result_dict[backwards_opt.INITIAL_ACTIVATION_KEY]])
    final_activations = numpy.array(
        [result_dict[backwards_opt.FINAL_ACTIVATION_KEY]])

    print('Denormalizing input and output (optimized) example...')
    denorm_input_matrix = _denormalize_data(
        result_dict[backwards_opt.NORM_INPUT_MATRICES_KEY])
    denorm_output_matrix = _denormalize_data(
        result_dict[backwards_opt.NORM_OUTPUT_MATRICES_KEY])

    print('Writing results to: "{0:s}"...'.format(output_file_name))
    bwo_metadata_dict = backwards_opt.check_metadata(
        component_type_string=component_type_string,
        num_iterations=num_iterations,
        learning_rate=learning_rate,
        target_class=target_class,
        layer_name=layer_name,
        ideal_activation=ideal_activation,
        neuron_indices=neuron_indices,
        channel_index=channel_index,
        l2_weight=None,
        radar_constraint_weight=None,
        minmax_constraint_weight=None)

    backwards_opt.write_standard_file(
        pickle_file_name=output_file_name,
        denorm_input_matrices=[denorm_input_matrix],
        denorm_output_matrices=[denorm_output_matrix],
        initial_activations=initial_activations,
        final_activations=final_activations,
        model_file_name=model_file_name,
        metadata_dict=bwo_metadata_dict,
        full_storm_id_strings=None,
        storm_times_unix_sec=None,
        sounding_pressure_matrix_pa=None)