Example #1
0
def index_images(folder, features_path, mapping_path, model, features_from_new_model_boolean, glove_path):
    """Index all images under *folder* and persist their embedding features.

    Loads GloVe word vectors only when the word-vector-aware ("new") model is
    requested, collects the image paths, generates embedding features with the
    given model, and saves both the features and the path mapping to disk.

    Args:
        folder: Directory containing the images to index.
        features_path: Destination path for the serialized feature vectors.
        mapping_path: Destination path for the file-index mapping.
        model: Model passed to vector_search.generate_features.
        features_from_new_model_boolean: If True, load and use GloVe word
            vectors; otherwise an empty vector list is passed through.
        glove_path: Path to the GloVe vectors file (only read when
            features_from_new_model_boolean is True).

    Returns:
        Tuple of (images_features, file_index) as produced by
        vector_search.generate_features.
    """
    print("Now indexing images...")
    # Word vectors are only needed when leveraging the new model.
    word_vectors = (
        vector_search.load_glove_vectors(glove_path)
        if features_from_new_model_boolean
        else []
    )
    # Utility function: only the image paths are needed here.
    _, _, paths = load_paired_img_wrd(
        folder=folder,
        word_vectors=word_vectors,
        use_word_vectors=features_from_new_model_boolean)
    images_features, file_index = vector_search.generate_features(paths, model)
    vector_search.save_features(features_path, images_features, mapping_path, file_index)
    return images_features, file_index
Example #2
0
    # NOTE(review): this chunk is the body of a Streamlit page function whose
    # `def` lies outside this view. `generate_image_features`, `image_paths`,
    # `features_path` and `file_mapping_path` presumably come from the
    # enclosing scope — confirm against the full file.
    st.write("""
    We are now going to load a model that was **pre-trained** on a large data set (imagenet), and is freely available
     online.
    
    We use this model to generate **embeddings** for our images.
    
    As you can see below, once we've used the model to generate image features, we can then **store them to disk** 
    and re-use them without needing to do inference again! This is one of the reason that embeddings are so popular 
    in practical applications, as they allow for huge efficiency gains. 
    """)

    # st.echo() renders this code block on the page while also executing it.
    with st.echo():
        model = vector_search.load_headless_pretrained_model()
        if generate_image_features:
            # Fresh run: compute embeddings for every image and cache them.
            print ("Generating image features...")
            images_features, file_index = vector_search.generate_features(image_paths, model)
            vector_search.save_features(features_path, images_features, file_mapping_path, file_index)
        else:
            # Reuse previously computed embeddings from disk.
            images_features, file_index = vector_search.load_features(features_path, file_mapping_path)

    # Illustrate the model architecture: full VGG16 vs. the headless variant.
    st.write("Our model is simply VGG16 without the last layer (softmax)")
    st.image(Image.open('assets/vgg16_architecture.jpg'), width=800, caption="Original VGG. Credit to Data Wow Blog")
    st.image(Image.open('assets/vgg16_chopped.jpg'), width=800, caption="Our model")
    st.write("This is how we get such a model in practice")
    # show_source appears to display a function's source on the page — verify.
    show_source(vector_search.load_headless_pretrained_model)

    st.write("""
    What do we mean by generating embeddings? Well we just use our pre-trained model up to the penultimate layer, and 
    store the value of the activations.""")
    show_source(vector_search.generate_features)
Example #3
0
def index_images(folder, features_path, mapping_path, model):
    """Generate embedding features for every image under *folder* and save
    them, together with the file-path mapping, to disk.

    Returns:
        The (features, file_index) pair produced by
        vector_search.generate_features.
    """
    # No word vectors in this variant: pass an empty list and disable them.
    _, _, image_paths = load_paired_img_wrd(folder, [], use_word_vectors=False)
    feats, index = vector_search.generate_features(image_paths, model)
    vector_search.save_features(
        features_path, feats, mapping_path, index)
    return feats, index