# ---- Example #1 ----
    def test_save_pearson_similarity(self):
        """End-to-end check of ``save_voxelwise_pearson_similarity``.

        Saves comparisons for several image pairs, then verifies that:

        * comparing an image with itself raises and stores no Comparison,
        * an image vs. an identical copy scores exactly 1.0,
        * two distinct image pairs reproduce known reference scores.

        NOTE: ``print(...)`` with a single argument behaves identically
        under Python 2 and Python 3, replacing the Py2-only statement form.
        """
        # Identical images under different ids: similarity should be 1.
        print("Testing %s vs. %s: same images, different ids" % (self.pk1,
                                                                 self.pk1_copy))
        save_voxelwise_pearson_similarity(self.pk1, self.pk1_copy)

        # Comparing an image with itself must raise and save nothing.
        with self.assertRaises(Exception):
            print("Testing %s vs. %s: same pks, success is raising exception" % (
                self.pk1, self.pk1))
            save_voxelwise_pearson_similarity(self.pk1, self.pk1)

        print("Testing %s vs. %s, different image set 1" % (self.pk1, self.pk2))
        save_voxelwise_pearson_similarity(self.pk1, self.pk2)

        print("Testing %s vs. %s, different image set 2" % (self.pk2, self.pk3))
        save_voxelwise_pearson_similarity(self.pk2, self.pk3)

        # The rejected self-comparison must not have produced a row.
        print("Success for this test means there are no comparisons returned.")
        image1, image1_copy = get_images_by_ordered_id(self.pk1, self.pk1)
        comparison = Comparison.objects.filter(
            image1=image1,
            image2=image1_copy,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 0)

        # Identical images: exactly one row with a perfect score.
        print("Success for this test means a score of 1.0")
        image1, image2 = get_images_by_ordered_id(self.pk1, self.pk1_copy)
        comparison = Comparison.objects.filter(
            image1=image1,
            image2=image2,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 1)
        self.assertAlmostEqual(comparison[0].similarity_score, 1.0)

        # Reference scores computed from the fixture images; 5 decimals
        # keeps the check robust across platforms.
        print("Success for the remaining tests means a specific comparison score.")
        image1, image2 = get_images_by_ordered_id(self.pk1, self.pk2)
        comparison = Comparison.objects.filter(
            image1=image1,
            image2=image2,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 1)
        print(comparison[0].similarity_score)
        assert_almost_equal(comparison[0].similarity_score,
                            0.214495998015581,
                            decimal=5)

        image2, image3 = get_images_by_ordered_id(self.pk3, self.pk2)
        comparison = Comparison.objects.filter(
            image1=image2,
            image2=image3,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 1)
        print(comparison[0].similarity_score)
        assert_almost_equal(comparison[0].similarity_score,
                            0.312548260435768,
                            decimal=5)
# ---- Example #2 ----
    def test_save_pearson_similarity(self):
        """End-to-end check of ``save_voxelwise_pearson_similarity``.

        Saves comparisons for several image pairs, then verifies that:

        * comparing an image with itself raises and stores no Comparison,
        * an image vs. an identical copy scores exactly 1.0,
        * two distinct image pairs reproduce known reference scores.

        NOTE: ``print(...)`` with a single argument behaves identically
        under Python 2 and Python 3, replacing the Py2-only statement form.
        """
        # Identical images under different ids: similarity should be 1.
        print("Testing %s vs. %s: same images, different ids" % (self.pk1, self.pk1_copy))
        save_voxelwise_pearson_similarity(self.pk1, self.pk1_copy)

        # Comparing an image with itself must raise and save nothing.
        with self.assertRaises(Exception):
            print("Testing %s vs. %s: same pks, success is raising exception" % (self.pk1, self.pk1))
            save_voxelwise_pearson_similarity(self.pk1, self.pk1)

        print("Testing %s vs. %s, different image set 1" % (self.pk1, self.pk2))
        save_voxelwise_pearson_similarity(self.pk1, self.pk2)

        print("Testing %s vs. %s, different image set 2" % (self.pk2, self.pk3))
        save_voxelwise_pearson_similarity(self.pk2, self.pk3)

        # The rejected self-comparison must not have produced a row.
        print("Success for this test means there are no comparisons returned.")
        image1, image1_copy = get_images_by_ordered_id(self.pk1, self.pk1)
        comparison = Comparison.objects.filter(
            image1=image1, image2=image1_copy,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 0)

        # Identical images: exactly one row with a perfect score.
        print("Success for this test means a score of 1.0")
        image1, image2 = get_images_by_ordered_id(self.pk1, self.pk1_copy)
        comparison = Comparison.objects.filter(
            image1=image1, image2=image2,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 1)
        self.assertAlmostEqual(comparison[0].similarity_score, 1.0)

        # Reference scores computed from the fixture images; 5 decimals
        # keeps the check robust across platforms.
        print("Success for the remaining tests means a specific comparison score.")
        image1, image2 = get_images_by_ordered_id(self.pk1, self.pk2)
        comparison = Comparison.objects.filter(
            image1=image1, image2=image2,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 1)
        print(comparison[0].similarity_score)
        assert_almost_equal(comparison[0].similarity_score, 0.214495998015581, decimal=5)

        image2, image3 = get_images_by_ordered_id(self.pk3, self.pk2)
        comparison = Comparison.objects.filter(
            image1=image2, image2=image3,
            similarity_metric=self.pearson_metric)
        self.assertEqual(len(comparison), 1)
        print(comparison[0].similarity_score)
        assert_almost_equal(comparison[0].similarity_score, 0.312548260436, decimal=5)
# ---- Example #3 ----
import errno

# Images should have the "transform" field after applying migrations (I think)

# Create (or update) the voxelwise Pearson similarity metric definition.
# update_or_create returns an (object, created) tuple, so unpack it —
# previously the tuple itself was bound to pearson_metric.
pearson_metric, _ = Similarity.objects.update_or_create(
    similarity_metric="pearson product-moment correlation coefficient",
    transformation="voxelwise",
    metric_ontology_iri=
    "http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
    transformation_ontology_iri=
    "http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")

# Delete all old comparisons
all_comparisons = Comparison.objects.all()
all_comparisons.delete()

# Filter down to images that are not private and not thresholded.
# BUG FIX: the original passed the single positional expression
# `collection__private=False and is_thresholded == False`, which
# short-circuits to False — so only collection__private=False was ever
# applied and the thresholded filter was silently dropped. Django
# filters take separate keyword arguments.
# NOTE(review): field name `is_thresholded` inferred from the original
# expression — confirm against the Image model.
all_images = Image.objects.filter(
    collection__private=False,
    is_thresholded=False).exclude(
        polymorphic_ctype__model__in=['image', 'atlas'])

# Now, we need to generate a "comparison" object for all files in the database
# We will use a celery task (as this will be integrated into upload workflow)
# The pk1 < pk2 check visits each unordered pair exactly once.
for image1 in all_images:
    for image2 in all_images:
        if image1.pk < image2.pk:
            print("Calculating pearson similarity for images %s and %s" % (
                image1, image2))
            save_voxelwise_pearson_similarity(image1.pk, image2.pk)
# @author: vsochat  (residual docstring tail from the scraped source)
import os
import django
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "neurovault.settings")
django.setup()

from neurovault.apps.statmaps.models import Similarity, Comparison, Image
from neurovault.apps.statmaps.tasks import save_voxelwise_pearson_similarity
from django.db import IntegrityError
import errno

# First create the image similarity metric; saving it twice violates a
# uniqueness constraint, which we treat as "already defined".
pearson_metric = Similarity(
    similarity_metric="pearson product-moment correlation coefficient",
    transformation="voxelwise",
    metric_ontology_iri="http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
    transformation_ontology_iri="http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")
try:
    pearson_metric.save()
except IntegrityError:
    # Metric row already exists — report and continue with the loop below.
    print("A Similarity Metric has already been defined for %s" % (pearson_metric))

# Now, we need to generate a "comparison" object for all files in the database
# We will use a celery task (as this will be integrated into upload workflow)
# Materialize the queryset once: the original re-evaluated (re-queried)
# the inner queryset on every iteration of the outer loop.
public_images = list(Image.objects.filter(collection__private=False))
for image1 in public_images:
    for image2 in public_images:
        # pk1 < pk2 visits each unordered pair exactly once.
        if image1.pk < image2.pk:
            print("Calculating pearson similarity for images %s and %s" % (image1, image2))
            save_voxelwise_pearson_similarity(image1.pk, image2.pk)