Example #1
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu May  7 13:56:27 2020

@author: sandeepkompella
"""

import turicreate as tc

image_data = tc.SFrame('image_train_data')
# explore() opens an interactive image viewer and returns None, so it is not printed
image_data['image'].explore()
print(image_data)

sketch = tc.Sketch(image_data['label'])

knn_model = tc.nearest_neighbors.create(image_data,
                                        features=['deep_features'],
                                        label='id')
cat = image_data[18:19]
cat['image'].explore()
print(knn_model.query(cat))


def get_images_from_ids(query_result):
    # Look up the neighbor rows in image_data by their 'id' reference labels.
    return image_data.filter_by(query_result['reference_label'], 'id')


cat_neighbors = get_images_from_ids(knn_model.query(cat))
cat_neighbors['image'].explore()
car = image_data[8:9]
Example #2
def sketch_frequent_items(df):
    # Approximate frequency counts of the passenger_count values via a sketch.
    return tc.Sketch(df['passenger_count']).frequent_items()
Example #3
def sketch_standard_deviation(df):
    # Approximate standard deviation of the fare_amount column via a sketch.
    return tc.Sketch(df['fare_amount']).std()
Example #4
def sketch_mean(df):
    # Approximate mean of the fare_amount column via a sketch.
    return tc.Sketch(df['fare_amount']).mean()
Example #5
# Select a single row (index 8) as the query image.
car = image_train[8:9]
car['image'][0].show()
get_images_from_ids(knn_model.query(car))['image'][0:1].show()


def show_neighbors(i):
    # Show one of the nearest-neighbor images for the training image at row i.
    neighbors = get_images_from_ids(knn_model.query(image_train[i:i + 1]))
    neighbors['image'][1].show()


image_train[24:25]['image'][0].show()
show_neighbors(24)

# deep features for image retrieval assignment
sketch = tc.Sketch(image_train['label'])

cat_data = image_train[image_train['label'] == 'cat']
dog_data = image_train[image_train['label'] == 'dog']
car_data = image_train[image_train['label'] == 'automobile']
bird_data = image_train[image_train['label'] == 'bird']

cat_model = tc.nearest_neighbors.create(cat_data,
                                        features=['deep_features'],
                                        label='id')
dog_model = tc.nearest_neighbors.create(dog_data,
                                        features=['deep_features'],
                                        label='id')
car_model = tc.nearest_neighbors.create(car_data,
                                        features=['deep_features'],
                                        label='id')
bird_model = tc.nearest_neighbors.create(bird_data,
                                         features=['deep_features'],
                                         label='id')
Example #6
import turicreate

image_test = turicreate.SFrame('/content/drive/My Drive/Colab Notebooks/Machine Learning/Week6/Practice/image_test_data/')

"""# **Train a nearest-neighbour model for retrieving images using deep featres**"""

knn_model = turicreate.nearest_neighbors.create(image_train,
                                                features=['deep_features'],
                                                label = 'id')

"""# **Task 1: Compute summary statistics of the data**
Sketch summaries are techniques for computing summary statistics of data very quickly. In GraphLab Create, SFrames and SArrays include a method:

.sketch_summary()
which computes such summary statistics. Using the training data, compute the sketch summary of the ‘label’ column and interpret the results. What’s the least common category in the training data?
"""

sketch = turicreate.Sketch(image_train['label'])

sketch_summary = image_train['label'].summary()

print(sketch_summary)
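
# A minimal sketch, assuming 'label' holds the four category strings:
# frequent_items() returns approximate per-value counts from the sketch,
# so the least common label can be read off directly.
label_counts = turicreate.Sketch(image_train['label']).frequent_items()
least_common_label = min(label_counts, key=label_counts.get)
print(label_counts)
print('Least common category:', least_common_label)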

"""# **Task 2: Create category-specific image retrieval models**
In most retrieval tasks, the data we have is unlabeled, thus we call these unsupervised learning problems. However, we have labels in this image dataset, and will use these to create one model for each of the 4 image categories, {‘dog’, ‘cat’, ‘automobile’, ‘bird’}. To start, follow these steps:

*   Split the SFrame with the training data into 4 different SFrames. Each of these will contain data for 1 of the 4 categories above. Hint: if you use a logical filter to select the rows where the ‘label’ column equals ‘dog’, you can create an SFrame with only the data for images labeled ‘dog’.

*   Similarly to the image retrieval notebook you downloaded, you are going to create a nearest neighbor model using the 'deep_features' as the features, but this time create one such model for each category, using the training_data. You can call the model with the ‘dog’ data the dog_model, the one with the ‘cat’ data the cat_model, and so on. You now have a nearest neighbors model that can find the nearest ‘dog’ to any image you give it, the dog_model; one that can find the nearest ‘cat’, the cat_model; and so on. A minimal sketch of these two steps follows below.
"""
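
# A minimal sketch of the two steps above, mirroring Example #5 (variable
# names are illustrative): logical filters split the training data by label,
# then one nearest neighbors model is built per category on 'deep_features'.
cat_data = image_train[image_train['label'] == 'cat']
dog_data = image_train[image_train['label'] == 'dog']
car_data = image_train[image_train['label'] == 'automobile']
bird_data = image_train[image_train['label'] == 'bird']

cat_model = turicreate.nearest_neighbors.create(cat_data,
                                                features=['deep_features'],
                                                label='id')
dog_model = turicreate.nearest_neighbors.create(dog_data,
                                                features=['deep_features'],
                                                label='id')
car_model = turicreate.nearest_neighbors.create(car_data,
                                                features=['deep_features'],
                                                label='id')
bird_model = turicreate.nearest_neighbors.create(bird_data,
                                                 features=['deep_features'],
                                                 label='id')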