Example #1
import numpy as np
from importlib import reload  # reload lives in importlib under Python 3

import model       # project-local modules, assumed importable here
import solution
import statistics  # project module, not the standard-library statistics


def run(tweets_train, tweets_test, evaluation=False, retweets=True, k=3):
    reload(statistics)

    print('Computing features...')
    stats = statistics.compute_statistics(tweets_train, tweets_test, k=k)
    train_features, train_labels = statistics.prepare_train_features(tweets_train, stats, retweets)
    test_features = statistics.prepare_test_features(tweets_test, stats, retweets)

    if evaluation:
        np.savetxt('eval_train_features.csv', train_features, delimiter=",")
        np.savetxt('eval_train_labels.csv', train_labels, delimiter=",")
        np.savetxt('eval_test_features.csv', test_features, delimiter=",")
    else:
        np.savetxt('train_features.csv', train_features, delimiter=",")
        np.savetxt('train_labels.csv', train_labels, delimiter=",")
        np.savetxt('test_features.csv', test_features, delimiter=",")

    print('Learning model...')
    lr_model = model.learn_model(train_features, train_labels)
    probabilities = model.apply_model(test_features, lr_model)

    print('Preparing solution...')
    if evaluation:
        solution.prepare_solutions_for_evaluation(tweets_test, probabilities[:, 0])
    else:
        solution.prepare_solutions(tweets_test, probabilities[:, 0])
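A call into this pipeline might look like the short sketch below; load_tweets and the file names are hypothetical placeholders, and only the run(...) signature comes from the example above.

import json

def load_tweets(path):
    # Hypothetical loader; the real project presumably builds the tweet
    # collections passed to run() in its own way.
    with open(path) as f:
        return json.load(f)

tweets_train = load_tweets('tweets_train.json')
tweets_test = load_tweets('tweets_test.json')

# Writes the feature/label CSV files, fits the model, and prepares the solution output.
run(tweets_train, tweets_test, evaluation=False, retweets=True, k=3)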
Example #2
def test(test_data, parameters):
    # mean NSS over the whole test set; test_data maps filename -> (image, fixations)
    NSS_scores = []
    for fn, (image, fixations) in test_data.items():
        prediction = apply_model(image, parameters)
        NSS = calculate_NSS(prediction, fixations)
        NSS_scores.append(NSS)
    return np.mean(NSS_scores)
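calculate_NSS itself is not shown in this listing, but Example #4 below computes the same quantity inline: z-normalize the predicted salience map and average it at the fixation points. A minimal sketch along those lines, with the exact function body assumed rather than taken from the project:

import tensorflow as tf

def calculate_NSS(prediction, fixations):
    # z-normalize the predicted salience map ...
    normalized = (prediction - tf.reduce_mean(prediction)) / tf.math.reduce_std(prediction)
    # ... and average its value at the ground-truth fixation coordinates
    values = [normalized[x, y] for x, y in fixations]
    return tf.reduce_mean(values)

Returning a TensorFlow value rather than a plain float keeps the score differentiable, which Example #6 relies on when it takes gradients of -NSS with respect to the parameters.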
Example #3
def magic(self):
    # save the current PIL image, then reload it with OpenCV in grayscale
    self.image.save('image.jpg')
    img = cv2.imread('image.jpg', 0)  # 0 -> cv2.IMREAD_GRAYSCALE
    img = crop(img)
    pred = apply_model(img)
    # print("predicted value is : ", pred)
    # show the prediction in a large Tkinter label at the bottom-left of the window
    my_label = tk.Label(root, text=pred, font=("Arial Bold", 150))
    my_label.config(bg="black")
    my_label.place(x=50, y=480, anchor='sw')
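The JPEG round trip above exists only to hand the image to OpenCV. If self.image is a PIL image, the same grayscale array can be produced in memory; to_grayscale_array below is a hypothetical helper, not part of the original code.

import numpy as np
from PIL import Image

def to_grayscale_array(pil_image: Image.Image) -> np.ndarray:
    # 'L' is PIL's 8-bit grayscale mode, matching cv2.IMREAD_GRAYSCALE
    # (up to the JPEG compression artifacts that this version avoids).
    return np.array(pil_image.convert('L'))

Inside magic(), img = to_grayscale_array(self.image) would then replace the save()/cv2.imread() pair.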
Example #4
def display_NSS_calculation(image, fixations, parameters, figure=None, title=None):
    prediction = apply_model(image, parameters)

    plt.figure(1, (15,6))
    plt.subplot(1,3,1)
    plt.title('(a) Stimulus')
    plt.imshow(image)

    # z-normalize the prediction and read it off at the fixation points (this is the NSS)
    normalized = (prediction - tf.reduce_mean(prediction)) / tf.math.reduce_std(prediction)
    NSS_values = [normalized[x, y] for x, y in fixations]
    NSS = np.mean(NSS_values)

    # want hunk_size sized bins for the histogram and contours; want to cover the full data range
    hunk_size = 1
    smallest = np.min(normalized) // hunk_size * hunk_size
    biggest = (np.max(normalized) // hunk_size + 2) * hunk_size
    hunks = np.arange(smallest, biggest, hunk_size)

    axes = plt.subplot(1,3,2)
    plt.title('(b) Predicted salience map (normalized)\n and actual fixation points')
    plt.imshow(normalized, cmap='Greys_r', vmin=-np.max(np.abs(normalized)), vmax=np.max(np.abs(normalized)))
    contour = plt.contour(np.flip(normalized, axis=0), hunks, colors='k', linewidths=1, origin='image')
    if len(fixations):
        plt.scatter(fixations[:,1], fixations[:,0], marker='.')
    axes.clabel(contour, fmt='%1d', colors='#006600', inline=1, fontsize=9)

    axes = plt.subplot(1,3,3)
    plt.title(f'(c) Calculating normalized scanpath saliency:\nhistogram and mean (NSS = {NSS:.2f})')
    plt.hist(NSS_values, bins=hunks)
    axes.axvline(x=NSS, color='black', linestyle='--')
    axes.set_xticks(hunks)
    axes.set_xticklabels([str(i) for i in hunks])
    axes.set_aspect(1.0/axes.get_data_ratio()) # unfortunately needed for square subplot

    if title is not None:
        plt.suptitle(f'{title}\n(NSS: {NSS:.4f})')

    if figure is not None:
        plt.savefig(figure, bbox_inches='tight')
    else:
        plt.show()
    plt.close()

    return NSS
Example #5
def __call__(self, image, parameters):
    return apply_model(image, parameters, js_safe=True)
Example #6
def parameter_gradient(image_sample, fixations, parameters):
    prediction = apply_model(image_sample, parameters)
    NSS = calculate_NSS(prediction, fixations)
    # gradients will return a list with a single element: [d(-NSS)/d(parameters)]
    grad = tf.gradients(-NSS, [parameters])[0]
    return NSS, grad
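For context, one way to consume this gradient in a plain gradient-descent step is sketched below. It assumes parameters is a tf.Variable (which the assign()/numpy() calls in Example #7 suggest) and uses tf.GradientTape, the eager-execution counterpart of tf.gradients; the learning rate and the training loop around it are not part of the code shown here.

import tensorflow as tf

def training_step(image_sample, fixations, parameters, learning_rate=0.1):
    # Recompute the loss (-NSS) under a tape so its gradient w.r.t. parameters is available.
    with tf.GradientTape() as tape:
        prediction = apply_model(image_sample, parameters)   # project function, as in Example #6
        loss = -calculate_NSS(prediction, fixations)         # project function, as in Example #6
    grad = tape.gradient(loss, parameters)
    # Plain gradient descent: move parameters in the direction that increases NSS.
    parameters.assign_sub(learning_rate * grad)
    return -loss  # the NSS achieved before this update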
Example #7
        np.save('params.npy', parameters.numpy())
    else:
        parameters.assign(np.load('params.npy'))

    if testing:
        print('Test NSS:', test(test_data, parameters))

    NSS_scores = []

    if len(sys.argv) > 1:
        for filename in sys.argv[1:]:
            with open(filename, 'rb') as f:
                unresized_image = tf.image.decode_image(f.read())
            image = tf.image.resize(unresized_image, (224, 224),
                                    antialias=True) / 255
            prediction = apply_model(image, parameters)
            safe_filename = filename.replace('/', '_')
            np.save('out/' + safe_filename + '.npy', prediction.numpy())
            display_NSS_calculation(image, [],
                                    parameters,
                                    figure='out/' + safe_filename + '.pdf',
                                    title=filename)
    else:
        all_NSS = []
        for filename in test_data.keys():
            image, fixations = test_data[filename]

            figure_out = 'out/' + filename + '.pdf'
            NSS = display_NSS_calculation(image,
                                          fixations,
                                          parameters,
                                          figure=figure_out,
                                          title=filename)
Example #8
import os
import json
from model import apply_model, load_model


models = os.listdir('../models')

# one JSON payload from stdin, applied to every saved model in turn
input_string = input()
data = json.loads(input_string)

for model_name in models:
    print(model_name)
    model = load_model(os.path.join('../models', model_name))
    pred, time = apply_model(model, data)
    print(time)