Example #1
def main(args, facebook_id, facebook_token):
    # There are three function choices: browse, train, like
    # browse: review new tinder profiles and store them in your database
    # train: use machine learning to create a new model that likes and dislikes
    # profiles based on your historical preference
    # like: use your machine learning model to like new tinder profiles
    if args.function == 'browse':
        my_sess = client(facebook_id, facebook_token, args.distance)
        my_sess.browse()

    elif args.function == 'train':
        # align the database
        tindetheus_align.main()
        # export the embeddings from the aligned database
        export_embeddings.main()
        # calculate the 128 dimensional average embedding per profile
        X, y = calc_avg_emb()
        # fit and save a logistic regression model to the database
        fit_log_reg(X, y)

    elif args.function == 'like':
        my_sess = client(facebook_id, facebook_token, args.distance)
        my_sess.like()

    else:
        text = '''You must specify a function. Your choices are either
tindetheus browse
tindetheus train
tindetheus like'''
        print(text)
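
For context, this entry point expects an argparse-style namespace exposing args.function and args.distance, plus Facebook credentials. A minimal sketch of a wrapper that could drive it is given below; the flag names simply mirror the attributes accessed above, and the credential handling is an assumption rather than part of the original source.

import argparse

def parse_args():
    # hypothetical CLI; mirrors the attributes used by main() above
    parser = argparse.ArgumentParser(description='tindetheus example CLI')
    parser.add_argument('function', choices=['browse', 'train', 'like'],
                        help='which tindetheus function to run')
    parser.add_argument('--distance', type=int, default=5,
                        help='maximum search distance')
    return parser.parse_args()

if __name__ == '__main__':
    args = parse_args()
    # facebook_id and facebook_token would typically come from a config
    # file or environment variables (assumed here, not shown in the source)
    main(args, 'YOUR_FACEBOOK_ID', 'YOUR_FACEBOOK_TOKEN')
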
def main(args, facebook_token):
    # There are four function choices: browse, train, like, validate
    # browse: review new tinder profiles and store them in your database
    # train: use machine learning to create a new model that likes and dislikes
    # profiles based on your historical preference
    # like: use your machine learning model to like new tinder profiles
    # validate: evaluate the trained model on a separate validation dataset
    if args.function == 'browse':
        my_sess = client(facebook_token, args.distance, args.model_dir,
                         likes_left=args.likes)
        my_sess.browse()

    elif args.function == 'train':
        # align the database
        tindetheus_align.main()
        # export the embeddings from the aligned database
        export_embeddings.main(model_dir=args.model_dir,
                               image_batch=args.image_batch)
        # calculate the n dimensional average embedding per profile
        X, y = calc_avg_emb()
        # fit and save a logistic regression model to the database
        fit_log_reg(X, y)

    elif args.function == 'validate':
        print('\n\nAttempting to validate the dataset...\n\n')
        valdir = 'validation'
        # align the validation dataset
        tindetheus_align.main(input_dir=valdir,
                              output_dir=valdir+'_aligned')
        # export embeddings
        # image_list is the list of image paths, emb_array is the embedding array
        image_list, emb_array = export_embeddings.main(model_dir=args.model_dir,  # noqa: E501
                                        data_dir=valdir+'_aligned',
                                        image_batch=args.image_batch,
                                        embeddings_name='val_embeddings.npy',
                                        labels_name='val_labels.npy',
                                        labels_strings_name='val_label_strings.npy',  # noqa: E501
                                        return_image_list=True)
        # print(image_list)
        # convert the image list to a numpy array to take advantage of
        # numpy array slicing
        image_list = np.array(image_list)
        print('\n\nEvaluating trained model\n \n')
        model = joblib.load('log_reg_model.pkl')
        yhat = model.predict(emb_array)
        # print(yhat)
        # 0 should be dislike, and 1 should be like
        # if this is backwards, there is probably a bug...
        dislikes = yhat == 0
        likes = yhat == 1
        show_images(image_list[dislikes], holdon=True, title='Dislike')
        print('\n\nGenerating plots...\n\n')
        plt.title('Dislike')

        show_images(image_list[likes], holdon=True, title='Like')
        plt.title('Like')

        cols = ['Image name', 'Model prediction (0=Dislike, 1=Like)']
        results = np.array((image_list, yhat)).T
        print('\n\nSaving results to validation.csv\n\n')
        my_results_DF = pd.DataFrame(results, columns=cols)
        my_results_DF.to_csv('validation.csv')

        plt.show()

    elif args.function == 'like':
        print('... Loading the facenet model ...')
        print('... be patient this may take some time ...')
        with tf.Graph().as_default():
            with tf.Session() as sess:
                # pass the tf session into client object
                my_sess = client(facebook_token, args.distance, args.model_dir,
                                 likes_left=args.likes, tfsess=sess)
                # Load the facenet model
                facenet.load_model(my_sess.model_dir)
                print('Facenet model loaded successfully!!!')
                # automatically like users
                my_sess.like()

    else:
        text = '''You must specify a function. Your choices are either
tindetheus browse
tindetheus train
tindetheus like
tindetheus validate'''
        print(text)
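
The train branch above depends on calc_avg_emb and fit_log_reg, which are defined elsewhere in the package. As a rough sketch of what a fit_log_reg-style helper could look like (an assumption, using scikit-learn and the log_reg_model.pkl filename that the validate branch loads):

import joblib
from sklearn.linear_model import LogisticRegression

def fit_log_reg(X, y):
    # X: (n_profiles, embedding_size) averaged facenet embeddings
    # y: 0/1 labels, 0 = dislike and 1 = like
    model = LogisticRegression(class_weight='balanced')
    model.fit(X, y)
    # the validate branch loads this exact filename with joblib.load
    joblib.dump(model, 'log_reg_model.pkl')
    return model
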
Example #3
    def like_or_dislike_users(self, users):
        # automatically like or dislike users based on the model previously
        # trained on your historical preferences

        # facenet settings from export_embeddings....
        model_dir = '20170512-110547'
        data_dir = 'temp_images_aligned'
        embeddings_name = 'temp_embeddings.npy'
        labels_name = 'temp_labels.npy'
        labels_strings_name = 'temp_label_strings.npy'
        is_aligned = True
        image_size = 160
        margin = 44
        gpu_memory_fraction = 1.0
        image_batch = 1000
        with tf.Graph().as_default():
            with tf.Session() as sess:
                # Load the facenet model
                facenet.load_model(model_dir)
                for user in users:
                    clean_temp_images()
                    urls = user.get_photos(width='640')
                    image_list = download_url_photos(urls,
                                                     user.id,
                                                     is_temp=True)
                    # align the database
                    tindetheus_align.main(input_dir='temp_images',
                                          output_dir='temp_images_aligned')
                    # export the embeddings from the aligned database

                    train_set = facenet.get_dataset(data_dir)
                    image_list_temp, label_list = facenet.get_image_paths_and_labels(
                        train_set)
                    label_strings = [
                        name
                        for name in os.listdir(os.path.expanduser(data_dir))
                        if os.path.isdir(
                            os.path.join(os.path.expanduser(data_dir), name))
                    ]

                    # Get input and output tensors
                    images_placeholder = tf.get_default_graph(
                    ).get_tensor_by_name("input:0")
                    embeddings = tf.get_default_graph().get_tensor_by_name(
                        "embeddings:0")
                    phase_train_placeholder = tf.get_default_graph(
                    ).get_tensor_by_name("phase_train:0")

                    # Run forward pass to calculate embeddings
                    nrof_images = len(image_list_temp)
                    print('Number of images: ', nrof_images)
                    batch_size = image_batch
                    if nrof_images % batch_size == 0:
                        nrof_batches = nrof_images // batch_size
                    else:
                        nrof_batches = (nrof_images // batch_size) + 1
                    print('Number of batches: ', nrof_batches)
                    embedding_size = embeddings.get_shape()[1]
                    emb_array = np.zeros((nrof_images, embedding_size))
                    start_time = time.time()

                    for i in range(nrof_batches):
                        if i == nrof_batches - 1:
                            n = nrof_images
                        else:
                            n = i * batch_size + batch_size
                        # Get images for the batch
                        if is_aligned is True:
                            images = facenet.load_data(
                                image_list_temp[i * batch_size:n], False,
                                False, image_size)
                        else:
                            images = load_and_align_data(
                                image_list_temp[i * batch_size:n], image_size,
                                margin, gpu_memory_fraction)
                        feed_dict = {
                            images_placeholder: images,
                            phase_train_placeholder: False
                        }
                        # Use the facenet model to calculate embeddings
                        embed = sess.run(embeddings, feed_dict=feed_dict)
                        emb_array[i * batch_size:n, :] = embed
                        print('Completed batch', i + 1, 'of', nrof_batches)

                    run_time = time.time() - start_time
                    print('Run time: ', run_time)

                    # export embeddings and labels
                    label_list = np.array(label_list)

                    np.save(embeddings_name, emb_array)

                    if emb_array.size > 0:
                        # calculate the 128 dimensional average embedding per profile
                        X = calc_avg_emb_temp(emb_array)
                        # evaluate with the model
                        yhat = self.model.predict(X)

                        if yhat[0] == 1:
                            didILike = 'Like'
                        else:
                            didILike = 'Dislike'
                    else:
                        # there were no faces in this profile
                        didILike = 'Dislike'
                    print(
                        '********************************************************'
                    )
                    print(user.name, user.age, didILike)
                    print(
                        '********************************************************'
                    )

                    dbase_names = move_images_temp(image_list, user.id)

                    if didILike == 'Like':
                        print(user.like())
                        self.likes_left -= 1
                    else:
                        print(user.dislike())
                    userList = [
                        user.id, user.name, user.age, user.bio,
                        user.distance_km, user.jobs, user.schools,
                        user.get_photos(width='640'), dbase_names, didILike
                    ]
                    self.al_database.append(userList)
                    np.save('al_database.npy', self.al_database)
                    clean_temp_images_aligned()
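
The like/dislike decision above hinges on calc_avg_emb_temp, which is defined elsewhere in the package. A minimal sketch of what such a helper could do (an assumption, not the original implementation) is to average the profile's per-image embeddings into a single feature vector so that self.model.predict(X) returns one prediction:

import numpy as np

def calc_avg_emb_temp(emb_array):
    # emb_array: (n_images, embedding_size) facenet embeddings for one profile
    # collapse them into shape (1, embedding_size) so model.predict(X)
    # yields a single like/dislike prediction, as used above
    return np.mean(emb_array, axis=0).reshape(1, -1)
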
    def like_or_dislike_users(self, users):
        # automatically like or dislike users based on the model previously
        # trained on your historical preferences

        # facenet settings from export_embeddings....
        data_dir = 'temp_images_aligned'
        embeddings_name = 'temp_embeddings.npy'
        # labels_name = 'temp_labels.npy'
        # labels_strings_name = 'temp_label_strings.npy'
        is_aligned = True
        image_size = 160
        margin = 44
        gpu_memory_fraction = 1.0
        image_batch = 1000
        prev_user = None
        for user in users:
            clean_temp_images()
            urls = user.get_photos(width='640')
            image_list = download_url_photos(urls, user.id,
                                             is_temp=True)
            # align the database
            tindetheus_align.main(input_dir='temp_images',
                                  output_dir='temp_images_aligned')
            # export the embeddings from the aligned database

            train_set = facenet.get_dataset(data_dir)
            image_list_temp, label_list = facenet.get_image_paths_and_labels(train_set)  # noqa: E501

            # Get input and output tensors
            images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")  # noqa: E501
            embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")  # noqa: E501
            phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")  # noqa: E501

            # Run forward pass to calculate embeddings
            nrof_images = len(image_list_temp)
            print('Number of images: ', nrof_images)
            batch_size = image_batch
            if nrof_images % batch_size == 0:
                nrof_batches = nrof_images // batch_size
            else:
                nrof_batches = (nrof_images // batch_size) + 1
            print('Number of batches: ', nrof_batches)
            embedding_size = embeddings.get_shape()[1]
            emb_array = np.zeros((nrof_images, embedding_size))
            start_time = time.time()

            for i in range(nrof_batches):
                if i == nrof_batches - 1:
                    n = nrof_images
                else:
                    n = i*batch_size + batch_size
                # Get images for the batch
                if is_aligned is True:
                    images = facenet.load_data(image_list_temp[i*batch_size:n],  # noqa: E501
                                                False, False,
                                                image_size)
                else:
                    images = load_and_align_data(image_list_temp[i*batch_size:n],  # noqa: E501
                                                    image_size, margin,
                                                    gpu_memory_fraction)
                feed_dict = {images_placeholder: images,
                             phase_train_placeholder: False}
                # Use the facenet model to calculate embeddings
                embed = self.sess.run(embeddings, feed_dict=feed_dict)
                emb_array[i*batch_size:n, :] = embed
                print('Completed batch', i+1, 'of', nrof_batches)

            run_time = time.time() - start_time
            print('Run time: ', run_time)

            # export embeddings and labels
            label_list = np.array(label_list)

            np.save(embeddings_name, emb_array)

            if emb_array.size > 0:
                # calculate the n dimensional average embedding per profile
                X = calc_avg_emb_temp(emb_array)
                # evaluate on the model
                yhat = self.model.predict(X)

                if yhat[0] == 1:
                    didILike = 'Like'
                    # check to see if this is the same user as before
                    if prev_user == user.id:
                        clean_temp_images_aligned()
                        print('\n\n You have already liked this user!!! \n \n')
                        print('This typically means you have used all of your'
                              ' free likes. Exiting program!!! \n\n')
                        self.likes_left = -1
                        return
                    else:
                        prev_user = user.id
                else:
                    didILike = 'Dislike'
            else:
                # there were no faces in this profile
                didILike = 'Dislike'
            print('**************************************************')
            print(user.name, user.age, didILike)
            print('**************************************************')

            dbase_names = move_images_temp(image_list, user.id)

            if didILike == 'Like':
                print(user.like())
                self.likes_left -= 1
            else:
                print(user.dislike())
            userList = [user.id, user.name, user.age, user.bio,
                        user.distance_km, user.jobs, user.schools,
                        user.get_photos(width='640'), dbase_names,
                        didILike]
            self.al_database.append(userList)
            np.save('al_database.npy', self.al_database)
            clean_temp_images_aligned()
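
Both versions compute the number of facenet forward-pass batches with the same ceiling-division arithmetic and let the final batch run to nrof_images. A small standalone illustration of that slicing logic, using hypothetical values rather than anything from the source:

# With 7 images and a batch size of 3 the loop produces the slices
# [0:3], [3:6], [6:7]; the last batch simply runs to nrof_images.
nrof_images = 7
batch_size = 3
if nrof_images % batch_size == 0:
    nrof_batches = nrof_images // batch_size
else:
    nrof_batches = (nrof_images // batch_size) + 1
for i in range(nrof_batches):
    n = nrof_images if i == nrof_batches - 1 else i * batch_size + batch_size
    print('batch', i + 1, 'covers indices', i * batch_size, 'to', n - 1)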