Example No. 1
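A `setUpClass` fixture, apparently from NeuroVault's statmaps test suite: it creates a test user and collection, saves a statistical map, builds its resampled representation with `save_resampled_transformation_single`, and loads the `gene_expression` JSON endpoint (with `mask=full`) into a pandas DataFrame.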
    @classmethod
    def setUpClass(cls):
        cls.test_path = os.path.abspath(os.path.dirname(__file__))
        cls.user, _ = User.objects.get_or_create(username='******')
        cls.client = Client()
        cls.client.login(username=cls.user)
        cls.Collection1 = Collection(name='Collection1', owner=cls.user)
        cls.Collection1.save()

        nii_path = os.path.join(cls.test_path, cls._map)
        map = save_statmap_form(image_path=nii_path,
                                collection=cls.Collection1)
        save_resampled_transformation_single(map.pk)
        response = json.loads(
            cls.client.get("/images/%d/gene_expression/json?mask=full" % map.pk,
                           follow=True).content)
        cls.df = pd.DataFrame(response["data"], columns=response["columns"])
Example No. 2
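A test verifying that NaN values are preserved when `save_resampled_transformation_single` resamples the image referenced by `self.pknan` to 4 mm.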
    def test_interpolated_transform_zeros(self):
        img = save_resampled_transformation_single(self.pknan,
                                                    resample_dim=[4, 4, 4])
        data = numpy.load(img.reduced_representation.file)
        print "Does transformation calculation maintain NaN values?: %s" % (
            numpy.isnan(data).any())
        assert_equal(numpy.isnan(data).any(), True)
Example No. 3
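A benchmark helper that builds (or loads a cached `.npy`/pickle copy of) a feature matrix: each benchmark image is uploaded to a fresh collection, resampled when its reduced representation is missing, and stored as one column of `features`.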
def createFeatures(subjects=None, resample_dim=[4, 4, 4]):
    clearDB()
    if os.path.isfile('/code/neurovault/apps/statmaps/tests/features'+str(subjects)+str(resample_dim)+'.npy'): #and subjects == None:
        return np.load('/code/neurovault/apps/statmaps/tests/features'+str(subjects)+str(resample_dim)+'.npy').T, \
               pickle.load(open('/code/neurovault/apps/statmaps/tests/dict_feat'+str(subjects)+str(resample_dim)+'.p',"rb" ))
    else:
        feature_dimension = get_feature_dimension(resample_dim)
        features = np.empty([feature_dimension, subjects])  # resample_dim [4,4,4] -> 28549 voxels, [16,16,16] -> 450
        dict_feat = {}
        u1 = User.objects.create(username='******')
        for i, file in enumerate(os.listdir('/code/neurovault/apps/statmaps/tests/bench/images/')):
            # print 'Adding subject ' + file
            print i
            randomCollection = Collection(name='random' + file, owner=u1, DOI='10.3389/fninf.2015.00008' + str(i))
            randomCollection.save()
            image = save_statmap_form(image_path=os.path.join('/code/neurovault/apps/statmaps/tests/bench/images/', file),
                                      collection=randomCollection, image_name=file, ignore_file_warning=True)
            if not image.reduced_representation or not os.path.exists(image.reduced_representation.path):
                image = save_resampled_transformation_single(image.pk, resample_dim)
            features[:, i] = np.load(image.reduced_representation.file)
            dict_feat[i] = int(file.split(".")[0])
            if i == subjects-1:
                features[np.isnan(features)] = 0
                np.save('/code/neurovault/apps/statmaps/tests/features'+str(subjects)+str(resample_dim)+'.npy', features)
                pickle.dump(dict_feat,open('/code/neurovault/apps/statmaps/tests/dict_feat'+str(subjects)+str(resample_dim)+'.p',"wb" ))
                return features.T, dict_feat
Example No. 4
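The same fixture pattern as Example No. 1, but querying the `gene_expression` endpoint without the `mask=full` parameter.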
    @classmethod
    def setUpClass(cls):
        cls.test_path = os.path.abspath(os.path.dirname(__file__))
        cls.user, _ = User.objects.get_or_create(username='******')
        cls.client = Client()
        cls.client.login(username=cls.user)
        cls.Collection1 = Collection(name='Collection1', owner=cls.user)
        cls.Collection1.save()

        nii_path = os.path.join(cls.test_path, cls._map)
        map = save_statmap_form(image_path=nii_path,
                                collection=cls.Collection1)
        save_resampled_transformation_single(map.pk)
        response = json.loads(
            cls.client.get("/images/%d/gene_expression/json" % map.pk,
                           follow=True).content)
        cls.df = pd.DataFrame(response["data"], columns=response["columns"])
Example No. 5
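A helper that uploads one benchmark image to measure the length of the reduced-representation vector produced for a given `resample_dim`, then clears the database.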
def get_feature_dimension(resample_dim):
    u1 = User.objects.create(username='******'+str(resample_dim))
    for file in os.listdir('/code/neurovault/apps/statmaps/tests/bench/images/'):
        randomCollection = Collection(name='random' + file, owner=u1, DOI='10.3389/fninf.2015.00008' + file)
        randomCollection.save()
        image = save_statmap_form(image_path=os.path.join('/code/neurovault/apps/statmaps/tests/bench/images/', file),
                                  collection=randomCollection, image_name=file, ignore_file_warning=True)
        if not image.reduced_representation or not os.path.exists(image.reduced_representation.path):
            image = save_resampled_transformation_single(image.pk, resample_dim)
        feature = np.load(image.reduced_representation.file)
        dimension = feature.shape[0]
        clearDB()
        break
    return dimension
Example No. 6
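The same NaN-preservation test as Example No. 2, repeated in the source listing with different line wrapping.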
    def test_interpolated_transform_zeros(self):
        img = save_resampled_transformation_single(self.pknan, resample_dim=[4, 4, 4])
        data = numpy.load(img.reduced_representation.file)
        print "Does transformation calculation maintain NaN values?: %s" % (numpy.isnan(data).any())
        assert_equal(numpy.isnan(data).any(), True)
Example No. 7
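A Django view that compares two images: it generates missing reduced representations, loads the voxel vectors, renders a scatterplot comparison against a 4 mm atlas, and attaches warnings for thresholded images.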
def compare_images(request, pk1, pk2):
    import numpy as np
    image1 = get_image(pk1, None, request)
    image2 = get_image(pk2, None, request)
    images = [image1, image2]

    # Get image: collection: [map_type] names no longer than ~125 characters
    image1_custom_name = format_image_collection_names(image_name=image1.name,
                                                       collection_name=image1.collection.name,
                                                       map_type=image1.map_type,total_length=125)
    image2_custom_name = format_image_collection_names(image_name=image2.name,
                                                       collection_name=image2.collection.name,
                                                       map_type=image2.map_type,total_length=125)

    image_names = [image1_custom_name,image2_custom_name]

    # Create custom links for the visualization
    custom = {
            "IMAGE_1_LINK":"/images/%s" % (image1.pk),
            "IMAGE_2_LINK":"/images/%s" % (image2.pk)
    }

    # create reduced representation in case it's not there
    if not image1.reduced_representation:
        image1 = save_resampled_transformation_single(image1.id) # cannot run this async
    if not image2.reduced_representation:
        image2 = save_resampled_transformation_single(image2.id) # cannot run this async

    # Load image vectors from npy files
    image_vector1 = np.load(image1.reduced_representation.file)
    image_vector2 = np.load(image2.reduced_representation.file)

    # Load atlas pickle, containing vectors of atlas labels, colors, and values for same voxel dimension (4mm)
    this_path = os.path.abspath(os.path.dirname(__file__))
    atlas_pkl_path = os.path.join(this_path, 'static/atlas/atlas_mni_4mm.pkl')
    atlas = joblib.load(atlas_pkl_path)

    # Load the atlas svg, so we don't need to dynamically generate it
    atlas_svg = os.path.join(this_path, 'static/atlas/atlas_mni_2mm_svg.pkl')
    atlas_svg = joblib.load(atlas_svg)

    # Generate html for similarity search, do not specify atlas
    html_snippet, _ = scatterplot.scatterplot_compare_vector(image_vector1=image_vector1,
                                                                 image_vector2=image_vector2,
                                                                 image_names=image_names,
                                                                 atlas_vector=atlas["atlas_vector"],
                                                                 atlas_labels=atlas["atlas_labels"],
                                                                 atlas_colors=atlas["atlas_colors"],
                                                                 corr_type="pearson",
                                                                 subsample_every=10, # subsample every 10th voxel
                                                                 custom=custom,
                                                                 remove_scripts="D3_MIN_JS",
                                                                 width=1000)

    # Add atlas svg to the image, and prepare html for rendering
    html = [h.replace("[coronal]",atlas_svg) for h in html_snippet]
    html = [h.strip("\n").replace("[axial]","").replace("[sagittal]","") for h in html]
    context = {'html': html}

    # Determine if either image is thresholded, and warn only about those images
    thresholded = [i for i in range(2) if images[i].is_thresholded]
    if len(thresholded) > 0:
        warnings = list()
        for i in thresholded:
            warnings.append('Warning: Thresholded image: %s (%.4g%% of voxels are zeros)' % (image_names[i], images[i].perc_bad_voxels))
        context["warnings"] = warnings

    return render(request, 'statmaps/compare_images.html', context)