Example #1
def populate_afni(image):
    # Set the names used by the finally block before entering the try,
    # so the cleanup and logging below never hit unbound locals.
    orig_name = image.name
    tmpdir = tempfile.mkdtemp()
    bricks = []
    try:
        bricks = split_afni4D_to_3D(image.file.path, tmp_dir=tmpdir)

        for label,brick in bricks:
            brick_fname = os.path.split(brick)[-1]
            mfile = memory_uploadfile(brick, brick_fname, image.file)
            brick_img = Image(name='%s - %s' % (orig_name, label), file=mfile)
            for field in ['collection','description','map_type','tags']:
                setattr(brick_img, field, getattr(image,field))

            if image.tags.exists():
                brick_img.save()  # generate PK before copying tags
                for tag in image.tags.all():
                    tagobj = ValueTaggedItem(content_object=brick_img,tag=tag)
                    tagobj.save()
            brick_img.save()

    finally:
        print 'converted afni4d %s to %s sub-brick images.' % (orig_name,len(bricks))
        shutil.rmtree(tmpdir)
        os.remove(image.file.path)
        image.delete()
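From the way the result is consumed here (and indexed in the tests further down), split_afni4D_to_3D appears to return a list of (label, path) tuples, one per AFNI sub-brick, with the sliced NIfTI files written into tmp_dir. A minimal standalone sketch of that call pattern; the input path is a placeholder and the import location is an assumption about where the project keeps these helpers:

import os
import shutil
import tempfile

# Assumed import path -- adjust to wherever the project defines the helper.
from neurovault.apps.statmaps.utils import split_afni4D_to_3D

tmpdir = tempfile.mkdtemp()
try:
    # Assumed return format, inferred from the loop above: [(label, path_to_3d_nifti), ...]
    bricks = split_afni4D_to_3D('/path/to/afni_4d.HEAD', tmp_dir=tmpdir)  # placeholder path
    for label, path in bricks:
        print label, os.path.exists(path)
finally:
    shutil.rmtree(tmpdir)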
Example #2
    def setUp(self):
        print "Preparing to test image comparison..."
        self.tmpdir = tempfile.mkdtemp()
        app_path = os.path.abspath(os.path.dirname(__file__))
        self.u1 = User.objects.create(username='******')
        self.comparisonCollection = Collection(name='comparisonCollection',owner=self.u1)
        self.comparisonCollection.save()

        image1 = save_statmap_form(image_path=os.path.join(app_path,'test_data/api/VentralFrontal_thr75_summaryimage_2mm.nii.gz'),
                              collection=self.comparisonCollection,
                              image_name = "image1",
                              ignore_file_warning=True)
        self.pk1 = image1.id
                
        # Image 2 is equivalent to 1, so pearson should be 1.0
        image2 = save_statmap_form(image_path=os.path.join(app_path,'test_data/api/VentralFrontal_thr75_summaryimage_2mm.nii.gz'),
                              collection=self.comparisonCollection,
                              image_name = "image1_copy",
                              ignore_file_warning=True)
        self.pk1_copy = image2.id
        
        # "Bricks" images
        bricks = split_afni4D_to_3D(nibabel.load(os.path.join(app_path,'test_data/TTatlas.nii.gz')),tmp_dir=self.tmpdir)
        image3 = save_statmap_form(image_path=bricks[0][1],collection=self.comparisonCollection,image_name="image2",ignore_file_warning=True)
        self.pk2 = image3.id     
        image4 = save_statmap_form(image_path=bricks[1][1],collection=self.comparisonCollection,image_name="image3",ignore_file_warning=True)
        self.pk3 = image4.id

        # This last image is a statmap with NaNs to test that transformation doesn't eliminate them
        image_nan = save_statmap_form(image_path=os.path.join(app_path,'test_data/statmaps/motor_lips_nan.nii.gz'),
                                      collection=self.comparisonCollection,
                                      image_name = "image_nan",
                                      ignore_file_warning=True)
        self.pknan = image_nan.id
                        
        Similarity.objects.update_or_create(similarity_metric="pearson product-moment correlation coefficient",
                                         transformation="voxelwise",
                                         metric_ontology_iri="http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
                                         transformation_ontology_iri="http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")
        self.pearson_metric = Similarity.objects.filter(similarity_metric="pearson product-moment correlation coefficient",
                                         transformation="voxelwise",
                                         metric_ontology_iri="http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
                                         transformation_ontology_iri="http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")        
Example #3
    def testAfni4DSlicing(self):
        test_afni = detect_afni4D(self.afni_file)
        test_non_afni = detect_afni4D(self.nii_file)

        bricks = split_afni4D_to_3D(self.afni_file,tmp_dir=self.tmpdir)

        # check detection of 4D is correct
        self.assertTrue(test_afni)
        self.assertFalse(test_non_afni)

        # check for 2 sub bricks
        self.assertEqual(len(bricks), 2)

        # check that brick labels match afni 3dinfo binary output
        self.assertEqual(bricks[0][0], 'uu3[0]')
        self.assertEqual(bricks[1][0], 'uu5[0]')

        # check that sliced niftis exist at output location
        self.assertTrue(os.path.exists(bricks[0][1]))
        self.assertTrue(os.path.exists(bricks[1][1]))
Example #4
    def testAfni4DSlicing(self):
        test_afni = detect_afni4D(nibabel.load(self.afni_file))
        test_non_afni = detect_afni4D(nibabel.load(self.nii_file))

        bricks = split_afni4D_to_3D(nibabel.load(self.afni_file),tmp_dir=self.tmpdir)

        # check detection of 4D is correct
        self.assertTrue(test_afni)
        self.assertFalse(test_non_afni)

        # check for 2 sub bricks
        self.assertEqual(len(bricks), 2)

        # check that brick labels match afni 3dinfo binary output
        self.assertEqual(bricks[0][0], 'uu3[0]')
        self.assertEqual(bricks[1][0], 'uu5[0]')

        # check that sliced niftis exist at output location
        self.assertTrue(os.path.exists(bricks[0][1]))
        self.assertTrue(os.path.exists(bricks[1][1]))
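Example #4 is the same test against a revision of these helpers that accepts an already-loaded nibabel image instead of a filesystem path (compare the nibabel.load(...) wrapping with Example #3). A short sketch of that newer calling convention; the file path and import location are placeholders:

import shutil
import tempfile
import nibabel

# Assumed import path for the helpers exercised above.
from neurovault.apps.statmaps.utils import detect_afni4D, split_afni4D_to_3D

tmpdir = tempfile.mkdtemp()
try:
    img = nibabel.load('/path/to/TTatlas.nii.gz')  # placeholder: any AFNI 4D image
    if detect_afni4D(img):
        bricks = split_afni4D_to_3D(img, tmp_dir=tmpdir)  # still [(label, path), ...]
        print [label for label, _ in bricks]
finally:
    shutil.rmtree(tmpdir)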
Example #5
def upload_folder(request, collection_cid):
    collection = get_collection(collection_cid, request)
    allowed_extensions = ['.nii', '.img', '.nii.gz']
    niftiFiles = []
    if request.method == 'POST':
        print request.POST
        print request.FILES
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            tmp_directory = tempfile.mkdtemp()
            print tmp_directory
            try:
                # Save archive (.zip or .tar.gz) to disk
                if "file" in request.FILES:
                    archive_name = request.FILES['file'].name
                    if fnmatch(archive_name, '*.nidm.zip'):
                        populate_nidm_results(request, collection)
                        return HttpResponseRedirect(
                            collection.get_absolute_url())

                    _, archive_ext = os.path.splitext(archive_name)
                    if archive_ext == '.zip':
                        compressed = zipfile.ZipFile(request.FILES['file'])
                    elif archive_ext == '.gz':
                        django_file = request.FILES['file']
                        django_file.open()
                        compressed = tarfile.TarFile(fileobj=gzip.GzipFile(
                            fileobj=django_file.file, mode='r'),
                                                     mode='r')
                    else:
                        raise Exception("Unsupported archive type %s." %
                                        archive_name)
                    compressed.extractall(path=tmp_directory)

                elif "file_input[]" in request.FILES:

                    for f, path in zip(request.FILES.getlist("file_input[]"),
                                       request.POST.getlist("paths[]")):
                        if fnmatch(f.name, '*.nidm.zip'):
                            request.FILES['file'] = f
                            populate_nidm_results(request, collection)
                            continue

                        new_path, _ = os.path.split(
                            os.path.join(tmp_directory, path))
                        mkdir_p(new_path)
                        filename = os.path.join(new_path, f.name)
                        with open(filename, 'wb') as tmp_file:  # binary mode for NIfTI payloads
                            tmp_file.write(f.read())
                else:
                    raise Exception("Unable to find uploaded files.")

                atlases = {}
                for root, subdirs, filenames in os.walk(tmp_directory):
                    if detect_feat_directory(root):
                        populate_feat_directory(request, collection, root)
                        subdirs[:] = []  # clear in place so os.walk does not descend further
                        filenames = []

                    # .gfeat parent dir under cope*.feat should not be added as statmaps
                    # this may be affected by future nidm-results_fsl parsing changes
                    if root.endswith('.gfeat'):
                        filenames = []

                    filenames = [f for f in filenames if not f[0] == '.']
                    for fname in sorted(filenames):
                        name, ext = splitext_nii_gz(fname)
                        nii_path = os.path.join(root, fname)

                        if ext == '.xml':
                            print "found xml"
                            dom = minidom.parse(os.path.join(root, fname))
                            for atlas in dom.getElementsByTagName(
                                    "summaryimagefile"):
                                print "found atlas"
                                path, base = os.path.split(
                                    atlas.lastChild.nodeValue)
                                nifti_name = os.path.join(path, base)
                                atlases[str(os.path.join(
                                    root, nifti_name[1:]))] = os.path.join(
                                        root, fname)
                        if ext in allowed_extensions:
                            nii = nib.load(nii_path)
                            if detect_afni4D(nii):
                                niftiFiles.extend(split_afni4D_to_3D(nii))
                            else:
                                niftiFiles.append((fname, nii_path))

                for label, fpath in niftiFiles:
                    # Read nifti file information
                    nii = nib.load(fpath)
                    if len(nii.get_shape()) > 3 and nii.get_shape()[3] > 1:
                        print "skipping wrong size"
                        continue
                    hdr = nii.get_header()
                    raw_hdr = hdr.structarr

                    # SPM only !!!
                    # Check if filename corresponds to a T-map
                    Tregexp = re.compile('spmT.*')
                    # Check if filename corresponds to an F-map
                    Fregexp = re.compile('spmF.*')

                    if Tregexp.search(fpath) is not None:
                        map_type = StatisticMap.T
                    elif Fregexp.search(fpath) is not None:
                        map_type = StatisticMap.F
                    else:
                        map_type = StatisticMap.OTHER

                    path, name, ext = split_filename(fpath)
                    dname = name + ".nii.gz"
                    spaced_name = name.replace('_', ' ').replace('-', ' ')

                    if ext.lower() != ".nii.gz":
                        new_file_tmp_dir = tempfile.mkdtemp()
                        new_file_tmp = os.path.join(new_file_tmp_dir,
                                                    name) + '.nii.gz'
                        nib.save(nii, new_file_tmp)
                        f = ContentFile(open(new_file_tmp).read(), name=dname)
                        shutil.rmtree(new_file_tmp_dir)
                        label += " (old ext: %s)" % ext
                    else:
                        f = ContentFile(open(fpath).read(), name=dname)

                    collection = get_collection(collection_cid, request)

                    if os.path.join(path, name) in atlases:

                        new_image = Atlas(name=spaced_name,
                                          description=raw_hdr['descrip'],
                                          collection=collection)

                        new_image.label_description_file = ContentFile(
                            open(atlases[os.path.join(path, name)]).read(),
                            name=name + ".xml")
                    else:
                        new_image = StatisticMap(name=spaced_name,
                                                 description=raw_hdr['descrip']
                                                 or label,
                                                 collection=collection)
                        new_image.map_type = map_type

                    new_image.file = f
                    new_image.save()

            except Exception:
                error = traceback.format_exc().splitlines()[-1]
                msg = "An error occurred with this upload: {}".format(error)
                messages.warning(request, msg)
                return HttpResponseRedirect(collection.get_absolute_url())

            finally:
                shutil.rmtree(tmp_directory)

            return HttpResponseRedirect(collection.get_absolute_url())
    else:
        form = UploadFileForm()
    return render_to_response("statmaps/upload_folder.html", {'form': form},
                              RequestContext(request))
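One incidental note on the archive handling in this view: the .gz branch builds the tar reader by hand from a GzipFile, but the standard library's tarfile.open can take the same file object and handle the gzip layer itself. A self-contained sketch of that alternative; the archive path stands in for the uploaded Django file used above:

import tarfile
import tempfile

tmp_directory = tempfile.mkdtemp()
with open('/path/to/upload.tar.gz', 'rb') as uploaded:  # stand-in for django_file.file
    # 'r:gz' lets tarfile drive the gzip decompression; 'r:*' would auto-detect it.
    with tarfile.open(fileobj=uploaded, mode='r:gz') as archive:
        archive.extractall(path=tmp_directory)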
Example #6
    def clean(self, **kwargs):

        cleaned_data = super(ImageForm, self).clean()
        file = cleaned_data.get("file")

        if file:
            # check extension of the data file
            _, fname, ext = split_filename(file.name)
            if not ext.lower() in [".nii.gz", ".nii", ".img"]:
                self._errors["file"] = self.error_class(["Doesn't have proper extension"])
                del cleaned_data["file"]
                return cleaned_data

            try:
                tmp_dir = tempfile.mkdtemp()
                if ext.lower() == ".img":
                    hdr_file = cleaned_data.get('hdr_file')
                    if hdr_file:

                        # check extension of the hdr file
                        _, _, hdr_ext = split_filename(hdr_file.name)
                        if not hdr_ext.lower() in [".hdr"]:
                            self._errors["hdr_file"] = self.error_class(
                                ["Doesn't have proper extension"])
                            del cleaned_data["hdr_file"]
                            return cleaned_data
                        else:
                            # write the header file to a temporary directory
                            hf = open(os.path.join(tmp_dir, fname + ".hdr"), "wb")
                            hf.write(hdr_file.file.read())
                            hf.close()
                    else:
                        self._errors["hdr_file"] = self.error_class([".img files require .hdr"])
                        del cleaned_data["hdr_file"]
                        return cleaned_data

                # write the data file to a temporary directory
                nii_tmp = os.path.join(tmp_dir, fname + ext)
                f = open(nii_tmp, "wb")
                f.write(file.file.read())
                f.close()

                # check if it is really nifti
                try:
                    nii = nb.load(nii_tmp)
                except Exception as e:
                    self._errors["file"] = self.error_class([str(e)])
                    del cleaned_data["file"]
                    return cleaned_data

                # convert to nii.gz if needed
                if ext.lower() != ".nii.gz":

                    # Papaya does not handle float64, but converting files loses precision
                    # if nii.get_data_dtype() == np.float64:
                    #     nii.set_data_dtype(np.float32)
                    new_name = fname + ".nii.gz"
                    nii_tmp = os.path.join(tmp_dir, new_name)
                    nb.save(nii, nii_tmp)

                    cleaned_data['file'] = memory_uploadfile(nii_tmp, new_name,
                                                             cleaned_data['file'])

                # detect AFNI 4D files and prepare 3D slices
                if nii_tmp is not None and detect_afni4D(nii_tmp):
                    self.afni_subbricks = split_afni4D_to_3D(nii_tmp)

            finally:
                try:
                    if self.afni_subbricks:
                        self.afni_tmp = tmp_dir  # keep temp dir for AFNI slicing
                    else:
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        else:
            raise ValidationError("Couldn't read uploaded file")
        return cleaned_data
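The extension check above only works because split_filename treats '.nii.gz' as a single extension (the returned ext is compared against ".nii.gz" directly). For readers who want to reproduce that check outside the form, here is an illustrative helper with the same behaviour; it is a sketch of the assumed contract, not the project's actual implementation:

import os

def split_filename_sketch(fname):
    """Return (path, base, ext), treating '.nii.gz' as one extension."""
    path, name = os.path.split(fname)
    if name.lower().endswith('.nii.gz'):
        return path, name[:-len('.nii.gz')], name[-len('.nii.gz'):]
    base, ext = os.path.splitext(name)
    return path, base, ext

print split_filename_sketch('/tmp/zstat1.nii.gz')   # ('/tmp', 'zstat1', '.nii.gz')
print split_filename_sketch('/tmp/beta_0001.img')   # ('/tmp', 'beta_0001', '.img')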
Example #7
def upload_folder(request, collection_cid):
    collection = get_collection(collection_cid,request)
    allowed_extensions = ['.nii', '.img', '.nii.gz']
    niftiFiles = []
    if request.method == 'POST':
        print request.POST
        print request.FILES
        form = UploadFileForm(request.POST, request.FILES)
        if form.is_valid():
            tmp_directory = tempfile.mkdtemp()
            print tmp_directory
            try:
                # Save archive (.zip or .tar.gz) to disk
                if "file" in request.FILES:
                    archive_name = request.FILES['file'].name
                    if fnmatch(archive_name,'*.nidm.zip'):
                        populate_nidm_results(request,collection)
                        return HttpResponseRedirect(collection.get_absolute_url())

                    _, archive_ext = os.path.splitext(archive_name)
                    if archive_ext == '.zip':
                        compressed = zipfile.ZipFile(request.FILES['file'])
                    elif archive_ext == '.gz':
                        django_file = request.FILES['file']
                        django_file.open()
                        compressed = tarfile.TarFile(fileobj=gzip.GzipFile(fileobj=django_file.file, mode='r'), mode='r')
                    else:
                        raise Exception("Unsupported archive type %s."%archive_name)
                    compressed.extractall(path=tmp_directory)

                elif "file_input[]" in request.FILES:

                    for f, path in zip(request.FILES.getlist(
                                       "file_input[]"), request.POST.getlist("paths[]")):
                        if fnmatch(f.name,'*.nidm.zip'):
                            request.FILES['file'] = f
                            populate_nidm_results(request,collection)
                            continue

                        new_path, _ = os.path.split(os.path.join(tmp_directory, path))
                        mkdir_p(new_path)
                        filename = os.path.join(new_path,f.name)
                        with open(filename, 'wb') as tmp_file:  # binary mode for NIfTI payloads
                            tmp_file.write(f.read())
                else:
                    raise Exception("Unable to find uploaded files.")

                atlases = {}
                for root, subdirs, filenames in os.walk(tmp_directory):
                    if detect_feat_directory(root):
                        populate_feat_directory(request,collection,root)
                        subdirs[:] = []  # clear in place so os.walk does not descend further
                        filenames = []

                    # .gfeat parent dir under cope*.feat should not be added as statmaps
                    # this may be affected by future nidm-results_fsl parsing changes
                    if root.endswith('.gfeat'):
                        filenames = []

                    filenames = [f for f in filenames if not f[0] == '.']
                    for fname in sorted(filenames):
                        name, ext = splitext_nii_gz(fname)
                        nii_path = os.path.join(root, fname)
                        
                        if ext == '.xml':
                            print "found xml"
                            dom = minidom.parse(os.path.join(root, fname))
                            for atlas in dom.getElementsByTagName("summaryimagefile"):
                                print "found atlas"
                                path, base = os.path.split(atlas.lastChild.nodeValue)
                                nifti_name = os.path.join(path, base)
                                atlases[str(os.path.join(root,
                                            nifti_name[1:]))] = os.path.join(root, fname)
                        if ext in allowed_extensions:
                            nii = nib.load(nii_path)
                            if detect_afni4D(nii):
                                niftiFiles.extend(split_afni4D_to_3D(nii))
                            else:
                                niftiFiles.append((fname,nii_path))
                
                for label,fpath in niftiFiles:
                    # Read nifti file information
                    nii = nib.load(fpath)
                    if len(nii.get_shape()) > 3 and nii.get_shape()[3] > 1:
                        print "skipping wrong size"
                        continue
                    hdr = nii.get_header()
                    raw_hdr = hdr.structarr

                    # SPM only !!!
                    # Check if filename corresponds to a T-map
                    Tregexp = re.compile('spmT.*')
                    # Check if filename corresponds to an F-map
                    Fregexp = re.compile('spmF.*')

                    if Tregexp.search(fpath) is not None:
                        map_type = StatisticMap.T
                    elif Fregexp.search(fpath) is not None:
                        map_type = StatisticMap.F
                    else:
                        map_type = StatisticMap.OTHER

                    path, name, ext = split_filename(fpath)
                    dname = name + ".nii.gz"
                    spaced_name = name.replace('_',' ').replace('-',' ')

                    if ext.lower() != ".nii.gz":
                        new_file_tmp_dir = tempfile.mkdtemp()
                        new_file_tmp = os.path.join(new_file_tmp_dir, name) + '.nii.gz'
                        nib.save(nii, new_file_tmp)
                        f = ContentFile(open(new_file_tmp).read(), name=dname)
                        shutil.rmtree(new_file_tmp_dir)
                        label += " (old ext: %s)" % ext
                    else:
                        f = ContentFile(open(fpath).read(), name=dname)

                    collection = get_collection(collection_cid,request)

                    if os.path.join(path, name) in atlases:

                        new_image = Atlas(name=spaced_name,
                                          description=raw_hdr['descrip'], collection=collection)

                        new_image.label_description_file = ContentFile(
                                    open(atlases[os.path.join(path,name)]).read(),
                                                                    name=name + ".xml")
                    else:
                        new_image = StatisticMap(name=spaced_name,
                                description=raw_hdr['descrip'] or label, collection=collection)
                        new_image.map_type = map_type

                    new_image.file = f
                    new_image.save()

            except Exception:
                error = traceback.format_exc().splitlines()[-1]
                msg = "An error occurred with this upload: {}".format(error)
                messages.warning(request, msg)
                return HttpResponseRedirect(collection.get_absolute_url())

            finally:
                shutil.rmtree(tmp_directory)

            return HttpResponseRedirect(collection.get_absolute_url())
    else:
        form = UploadFileForm()
    return render_to_response("statmaps/upload_folder.html",
                              {'form': form},  RequestContext(request))
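A small robustness note that applies to both copies of upload_folder: the NIfTI payloads handed to ContentFile are binary, so the plain open(fpath) calls rely on text mode being harmless on POSIX. A sketch of the same construction with an explicit binary read and a context manager; the path and name are placeholders for the loop variables above:

from django.core.files.base import ContentFile

fpath = '/tmp/zstat1.nii.gz'   # placeholder for the loop's fpath
dname = 'zstat1.nii.gz'        # placeholder for the computed download name

with open(fpath, 'rb') as nii_file:
    f = ContentFile(nii_file.read(), name=dname)  # same object the view assigns to new_image.file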
Example #8
    def clean(self, **kwargs):
        cleaned_data = super(ImageForm, self).clean()
        file = cleaned_data.get("file")

        if file:
            # check extension of the data file
            _, fname, ext = split_filename(file.name)
            if not ext.lower() in [".nii.gz", ".nii", ".img"]:
                self._errors["file"] = self.error_class(["Doesn't have proper extension"])
                del cleaned_data["file"]
                return cleaned_data

            # prepare file to loading into memory
            file.open()
            if file.name.lower().endswith(".gz"):
                fileobj = GzipFile(filename=file.name, mode='rb', fileobj=file.file)
            else:
                fileobj=file.file
            
            file_map = {'image': nb.FileHolder(file.name, fileobj)}
            try:
                tmp_dir = tempfile.mkdtemp()
                if ext.lower() == ".img":
                    hdr_file = cleaned_data.get('hdr_file')
                    if hdr_file:
                        # check extension of the hdr file
                        _, _, hdr_ext = split_filename(hdr_file.name)
                        if not hdr_ext.lower() in [".hdr"]:
                            self._errors["hdr_file"] = self.error_class(
                                ["Doesn't have proper extension"])
                            del cleaned_data["hdr_file"]
                            return cleaned_data
                        else:
                            hdr_file.open()
                            file_map["header"] = nb.FileHolder(hdr_file.name, hdr_file.file)
                    else:
                        self._errors["hdr_file"] = self.error_class(
                                [".img file requires .hdr file"])
                        del cleaned_data["hdr_file"]
                        return cleaned_data

                # check if it is really nifti
                try:
                    print file_map
                    if "header" in file_map:
                        nii = nb.Nifti1Pair.from_file_map(file_map)
                    else:
                        nii = nb.Nifti1Image.from_file_map(file_map)
                except Exception as e:
                    self._errors["file"] = self.error_class([str(e)])
                    del cleaned_data["file"]
                    return cleaned_data
                
                # detect AFNI 4D files and prepare 3D slices
                if nii is not None and detect_afni4D(nii):
                    self.afni_subbricks = split_afni4D_to_3D(nii)
                else:
                    squeezable_dimensions = len([dim for dim in nii.shape if dim not in (0, 1)])
                    
                    if squeezable_dimensions != 3:
                        self._errors["file"] = self.error_class(["4D files are not supported.\n If it's multiple maps in one file please split them and upload separately"])
                        del cleaned_data["file"]
                        return cleaned_data
                        
    
                    # convert to nii.gz if needed
                    if ext.lower() != ".nii.gz" or squeezable_dimensions < len(nii.shape):
                        
                        #convert pseudo 4D to 3D
                        if squeezable_dimensions < len(nii.shape):
                            new_data = np.squeeze(nii.get_data())
                            nii = nb.Nifti1Image(new_data, nii.get_affine(), nii.get_header())
    
                        # Papaya does not handle float64, but converting files loses precision
                        # if nii.get_data_dtype() == np.float64:
                        #     nii.set_data_dtype(np.float32)
                        new_name = fname + ".nii.gz"
                        nii_tmp = os.path.join(tmp_dir, new_name)
                        nb.save(nii, nii_tmp)
    
                        cleaned_data['file'] = memory_uploadfile(nii_tmp, new_name,
                                                                 cleaned_data['file'])
                

            finally:
                try:
                    if self.afni_subbricks:
                        self.afni_tmp = tmp_dir  # keep temp dir for AFNI slicing
                    else:
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        else:
            raise ValidationError("Couldn't read uploaded file")
        return cleaned_data
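Example #8 additionally squeezes pseudo-4D images (a trailing singleton dimension) down to 3D before re-saving. A standalone sketch of that conversion with numpy and nibabel; the input path is a placeholder, and the modern accessor names (get_fdata, .affine, .header) replace the older get_data()/get_affine()/get_header() calls used in the form above:

import numpy as np
import nibabel as nb

img = nb.load('/path/to/pseudo_4d.nii.gz')  # placeholder, e.g. shape (91, 109, 91, 1)
squeezable_dimensions = len([dim for dim in img.shape if dim not in (0, 1)])

if squeezable_dimensions == 3 and len(img.shape) > 3:
    # Drop the singleton dimensions but keep the affine and header, as the form does.
    data = np.squeeze(img.get_fdata())
    img = nb.Nifti1Image(data, img.affine, img.header)
    nb.save(img, '/path/to/pseudo_4d_squeezed.nii.gz')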