def testaddAFNI(self):
    """Upload an AFNI 4D file through StatisticMapForm and check that it
    is split into one StatisticMap per sub-brick (2 for this fixture)."""
    post_dict = {
        'name': "test map",
        'cognitive_paradigm_cogatlas': 'trm_4f24126c22011',
        'modality': 'fMRI-BOLD',
        'map_type': 'T',
        'collection': self.coll.pk,
    }
    testpath = os.path.abspath(os.path.dirname(__file__))
    fname = os.path.join(testpath, 'test_data/statmaps/saccade.I_C.MNI.nii.gz')
    nii = nb.load(fname)
    # sanity-check the fixture: it must be detected as 4D and be sliceable
    self.assertTrue(detect_4D(nii))
    self.assertTrue(len(split_4D_to_3D(nii)) > 0)
    # read in binary mode and close the handle
    # (was: open(fname).read() — leaked the handle, text-mode read of .gz)
    with open(fname, 'rb') as fp:
        file_dict = {'file': SimpleUploadedFile(fname, fp.read())}
    form = StatisticMapForm(post_dict, file_dict)
    self.assertTrue(form.is_valid())
    form.save()
    # the fixture holds 2 sub-bricks, so 2 StatisticMaps must exist
    self.assertEqual(
        StatisticMap.objects.filter(collection=self.coll.pk).count(), 2)
def populate_afni(image): try: orig_name = image.name tmpdir = tempfile.mkdtemp() bricks = split_4D_to_3D(image.file.path, tmp_dir=tmpdir) for label, brick in bricks: brick_fname = os.path.split(brick)[-1] mfile = memory_uploadfile(brick, brick_fname, image.file) brick_img = Image(name='%s - %s' % (orig_name, label), file=mfile) for field in ['collection', 'description', 'map_type', 'tags']: setattr(brick_img, field, getattr(image, field)) if image.tags.exists(): brick_img.save() # generate PK before copying tags for tag in image.tags.all(): tagobj = ValueTaggedItem(content_object=brick_img, tag=tag) tagobj.save() brick_img.save() finally: print 'converted afni4d %s to %s sub-brick images.' % (orig_name, len(bricks)) shutil.rmtree(tmpdir) os.remove(image.file.path) image.delete()
def testaddAFNI(self):
    """Upload an AFNI 4D file through StatisticMapForm and check that it
    is split into one StatisticMap per sub-brick (2 for this fixture)."""
    post_dict = {
        'name': "test map",
        'cognitive_paradigm_cogatlas': 'trm_4f24126c22011',
        'modality': 'fMRI-BOLD',
        'map_type': 'T',
        'number_of_subjects': 10,
        'analysis_level': 'G',
        'collection': self.coll.pk,
        'target_template_image': 'GenericMNI',
    }
    testpath = os.path.abspath(os.path.dirname(__file__))
    fname = os.path.join(testpath, 'test_data/statmaps/saccade.I_C.MNI.nii.gz')
    nii = nb.load(fname)
    # sanity-check the fixture: it must be detected as 4D and be sliceable
    self.assertTrue(detect_4D(nii))
    self.assertTrue(len(split_4D_to_3D(nii)) > 0)
    # read in binary mode and close the handle
    # (was: open(fname).read() — leaked the handle, text-mode read of .gz)
    with open(fname, 'rb') as fp:
        file_dict = {'file': SimpleUploadedFile(fname, fp.read())}
    form = StatisticMapForm(post_dict, file_dict)
    self.assertTrue(form.is_valid())
    form.save()
    # the fixture holds 2 sub-bricks, so 2 StatisticMaps must exist
    self.assertEqual(
        StatisticMap.objects.filter(collection=self.coll.pk).count(), 2)
def populate_afni(image): try: orig_name = image.name tmpdir = tempfile.mkdtemp() bricks = split_4D_to_3D(image.file.path,tmp_dir=tmpdir) for label,brick in bricks: brick_fname = os.path.split(brick)[-1] mfile = memory_uploadfile(brick, brick_fname, image.file) brick_img = Image(name='%s - %s' % (orig_name, label), file=mfile) for field in ['collection','description','map_type','tags']: setattr(brick_img, field, getattr(image,field)) if image.tags.exists(): brick_img.save() # generate PK before copying tags for tag in image.tags.all(): tagobj = ValueTaggedItem(content_object=brick_img,tag=tag) tagobj.save() brick_img.save() finally: print 'converted afni4d %s to %s sub-brick images.' % (orig_name,len(bricks)) shutil.rmtree(tmpdir) os.remove(image.file.path) image.delete()
def setUp(self):
    """Build the image-comparison fixture.

    Creates one user, five single-DOI collections, and five statistic
    maps: image1 and an identical copy (pearson similarity should be
    1.0), two sub-bricks sliced from a 4D atlas file, and one map
    containing NaNs. Also ensures the voxelwise pearson Similarity
    metric row exists.
    """
    print "Preparing to test image comparison..."
    self.tmpdir = tempfile.mkdtemp()
    app_path = os.path.abspath(os.path.dirname(__file__))
    self.u1 = User.objects.create(username='******')
    # five separate collections, each with its own DOI, so that every
    # image lives in a distinct comparable collection
    self.comparisonCollection1 = Collection(name='comparisonCollection1',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00008')
    self.comparisonCollection1.save()
    self.comparisonCollection2 = Collection(name='comparisonCollection2',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00009')
    self.comparisonCollection2.save()
    self.comparisonCollection3 = Collection(name='comparisonCollection3',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00010')
    self.comparisonCollection3.save()
    self.comparisonCollection4 = Collection(name='comparisonCollection4',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00011')
    self.comparisonCollection4.save()
    self.comparisonCollection5 = Collection(name='comparisonCollection5',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00012')
    self.comparisonCollection5.save()
    image1 = save_statmap_form(
        image_path=os.path.join(
            app_path,
            'test_data/api/VentralFrontal_thr75_summaryimage_2mm.nii.gz'),
        collection=self.comparisonCollection1,
        image_name="image1",
        ignore_file_warning=True)
    self.pk1 = image1.id
    # Image 2 is equivalent to 1, so pearson should be 1.0
    image2 = save_statmap_form(
        image_path=os.path.join(
            app_path,
            'test_data/api/VentralFrontal_thr75_summaryimage_2mm.nii.gz'),
        collection=self.comparisonCollection2,
        image_name="image1_copy",
        ignore_file_warning=True)
    self.pk1_copy = image2.id
    # "Bricks" images: slice the 4D atlas into 3D sub-bricks and save
    # the first two as separate statmaps
    bricks = split_4D_to_3D(
        nibabel.load(os.path.join(app_path, 'test_data/TTatlas.nii.gz')),
        tmp_dir=self.tmpdir)
    image3 = save_statmap_form(image_path=bricks[0][1],
                               collection=self.comparisonCollection3,
                               image_name="image2",
                               ignore_file_warning=True)
    self.pk2 = image3.id
    image4 = save_statmap_form(image_path=bricks[1][1],
                               collection=self.comparisonCollection4,
                               image_name="image3",
                               ignore_file_warning=True)
    self.pk3 = image4.id
    # This last image is a statmap with NaNs to test that transformation
    # doesn't eliminate them
    image_nan = save_statmap_form(
        image_path=os.path.join(
            app_path, 'test_data/statmaps/motor_lips_nan.nii.gz'),
        collection=self.comparisonCollection5,
        image_name="image_nan",
        ignore_file_warning=True)
    self.pknan = image_nan.id
    # make sure the voxelwise pearson metric exists, then fetch it
    # (note: filter() returns a queryset, not a single Similarity row)
    Similarity.objects.update_or_create(
        similarity_metric="pearson product-moment correlation coefficient",
        transformation="voxelwise",
        metric_ontology_iri="http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
        transformation_ontology_iri="http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")
    self.pearson_metric = Similarity.objects.filter(
        similarity_metric="pearson product-moment correlation coefficient",
        transformation="voxelwise",
        metric_ontology_iri="http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
        transformation_ontology_iri="http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")
def testAfni4DSlicing(self):
    """split_4D_to_3D must slice the AFNI 4D fixture into two labelled
    3D niftis on disk, and detect_4D must flag only the AFNI file."""
    test_afni = detect_4D(nibabel.load(self.afni_file))
    test_non_afni = detect_4D(nibabel.load(self.nii_file))
    bricks = split_4D_to_3D(nibabel.load(self.afni_file),
                            tmp_dir=self.tmpdir)

    # check detection of 4D is correct
    self.assertTrue(test_afni)
    self.assertFalse(test_non_afni)

    # check for 2 sub bricks (assertEquals is a deprecated alias)
    self.assertEqual(len(bricks), 2)

    # check that brick labels match afni 3dinfo binary output
    self.assertEqual(bricks[0][0], "uu3[0]")
    self.assertEqual(bricks[1][0], "uu5[0]")

    # check that sliced niftis exist at output location
    self.assertTrue(os.path.exists(bricks[0][1]))
    self.assertTrue(os.path.exists(bricks[1][1]))
def testAfni4DSlicing(self):
    """split_4D_to_3D must slice the AFNI 4D fixture into two labelled
    3D niftis on disk, and detect_4D must flag only the AFNI file."""
    test_afni = detect_4D(nibabel.load(self.afni_file))
    test_non_afni = detect_4D(nibabel.load(self.nii_file))
    bricks = split_4D_to_3D(nibabel.load(self.afni_file),
                            tmp_dir=self.tmpdir)

    # check detection of 4D is correct
    self.assertTrue(test_afni)
    self.assertFalse(test_non_afni)

    # check for 2 sub bricks (assertEquals is a deprecated alias)
    self.assertEqual(len(bricks), 2)

    # check that brick labels match afni 3dinfo binary output
    self.assertEqual(bricks[0][0], 'uu3[0]')
    self.assertEqual(bricks[1][0], 'uu5[0]')

    # check that sliced niftis exist at output location
    self.assertTrue(os.path.exists(bricks[0][1]))
    self.assertTrue(os.path.exists(bricks[1][1]))
def setUp(self):
    """Build the image-comparison fixture.

    Creates one user, five single-DOI collections, and five statistic
    maps: image1 and an identical copy (pearson similarity should be
    1.0), two sub-bricks sliced from a 4D atlas file, and one map
    containing NaNs. Also ensures the voxelwise pearson Similarity
    metric row exists.
    """
    print "Preparing to test image comparison..."
    self.tmpdir = tempfile.mkdtemp()
    app_path = os.path.abspath(os.path.dirname(__file__))
    self.u1 = User.objects.create(username='******')
    # five separate collections, each with its own DOI, so that every
    # image lives in a distinct comparable collection
    self.comparisonCollection1 = Collection(name='comparisonCollection1',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00008')
    self.comparisonCollection1.save()
    self.comparisonCollection2 = Collection(name='comparisonCollection2',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00009')
    self.comparisonCollection2.save()
    self.comparisonCollection3 = Collection(name='comparisonCollection3',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00010')
    self.comparisonCollection3.save()
    self.comparisonCollection4 = Collection(name='comparisonCollection4',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00011')
    self.comparisonCollection4.save()
    self.comparisonCollection5 = Collection(name='comparisonCollection5',
                                            owner=self.u1,
                                            DOI='10.3389/fninf.2015.00012')
    self.comparisonCollection5.save()
    image1 = save_statmap_form(
        image_path=os.path.join(
            app_path,
            'test_data/api/VentralFrontal_thr75_summaryimage_2mm.nii.gz'),
        collection=self.comparisonCollection1,
        image_name="image1",
        ignore_file_warning=True)
    self.pk1 = image1.id
    # Image 2 is equivalent to 1, so pearson should be 1.0
    image2 = save_statmap_form(
        image_path=os.path.join(
            app_path,
            'test_data/api/VentralFrontal_thr75_summaryimage_2mm.nii.gz'),
        collection=self.comparisonCollection2,
        image_name="image1_copy",
        ignore_file_warning=True)
    self.pk1_copy = image2.id
    # "Bricks" images: slice the 4D atlas into 3D sub-bricks and save
    # the first two as separate statmaps
    bricks = split_4D_to_3D(
        nibabel.load(os.path.join(app_path, 'test_data/TTatlas.nii.gz')),
        tmp_dir=self.tmpdir)
    image3 = save_statmap_form(image_path=bricks[0][1],
                               collection=self.comparisonCollection3,
                               image_name="image2",
                               ignore_file_warning=True)
    self.pk2 = image3.id
    image4 = save_statmap_form(image_path=bricks[1][1],
                               collection=self.comparisonCollection4,
                               image_name="image3",
                               ignore_file_warning=True)
    self.pk3 = image4.id
    # This last image is a statmap with NaNs to test that transformation
    # doesn't eliminate them
    image_nan = save_statmap_form(
        image_path=os.path.join(
            app_path, 'test_data/statmaps/motor_lips_nan.nii.gz'),
        collection=self.comparisonCollection5,
        image_name="image_nan",
        ignore_file_warning=True)
    self.pknan = image_nan.id
    # make sure the voxelwise pearson metric exists, then fetch it
    # (note: filter() returns a queryset, not a single Similarity row)
    Similarity.objects.update_or_create(
        similarity_metric="pearson product-moment correlation coefficient",
        transformation="voxelwise",
        metric_ontology_iri=
        "http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
        transformation_ontology_iri=
        "http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")
    self.pearson_metric = Similarity.objects.filter(
        similarity_metric="pearson product-moment correlation coefficient",
        transformation="voxelwise",
        metric_ontology_iri=
        "http://webprotege.stanford.edu/RCS8W76v1MfdvskPLiOdPaA",
        transformation_ontology_iri=
        "http://webprotege.stanford.edu/R87C6eFjEftkceScn1GblDL")
del cleaned_data["hdr_file"] return cleaned_data # check if it is really nifti try: # print file_map if "header" in file_map: nii = nb.Nifti1Pair.from_file_map(file_map) else: nii = nb.Nifti1Image.from_file_map(file_map) except Exception as e: raise # detect AFNI 4D files and prepare 3D slices if nii is not None and detect_4D(nii): self.afni_subbricks = split_4D_to_3D(nii, tmp_dir=tmp_dir) else: squeezable_dimensions = len([a for a in nii.shape if a not in [0, 1]]) if squeezable_dimensions != 3: self._errors["file"] = self.error_class( ["4D files are not supported.\n " "If it's multiple maps in one " "file please split them and " "upload separately"]) del cleaned_data["file"] return cleaned_data # convert to nii.gz if needed if (ext.lower() != ".nii.gz" or squeezable_dimensions < len(nii.shape)):
def clean_and_validate(self, cleaned_data):
    """Validate and normalize an uploaded statistic-map file.

    Checks the file extension (.nii.gz / .nii / .img, with .img needing
    a matching .hdr), loads the volume with nibabel to prove it is a
    real NIfTI, slices AFNI 4D files into sub-bricks (stored on
    self.afni_subbricks, temp dir kept on self.afni_tmp), rejects other
    4D files, and converts/squeezes anything else to a .nii.gz stored
    back into cleaned_data['file']. On any validation failure the
    offending key is removed from cleaned_data and self._errors is set.
    """
    file = cleaned_data.get('file')
    if file:
        # check extension of the data file
        _, fname, ext = split_filename(file.name)
        if not ext.lower() in [".nii.gz", ".nii", ".img"]:
            self._errors["file"] = self.error_class(
                ["Doesn't have proper extension"])
            del cleaned_data["file"]
            return cleaned_data

        # prepare file to loading into memory; transparently un-gzip
        file.open()
        fileobj = file.file
        if file.name.lower().endswith(".gz"):
            fileobj = GzipFile(filename=file.name, mode='rb',
                               fileobj=fileobj)

        file_map = {'image': nb.FileHolder(file.name, fileobj)}
        try:
            tmp_dir = tempfile.mkdtemp()
            if ext.lower() == ".img":
                hdr_file = cleaned_data.get('hdr_file')
                if hdr_file:
                    # check extension of the hdr file
                    _, _, hdr_ext = split_filename(hdr_file.name)
                    if not hdr_ext.lower() in [".hdr"]:
                        self._errors["hdr_file"] = self.error_class(
                            ["Doesn't have proper extension"])
                        del cleaned_data["hdr_file"]
                        return cleaned_data
                    else:
                        hdr_file.open()
                        file_map["header"] = nb.FileHolder(
                            hdr_file.name, hdr_file.file)
                else:
                    # .img without its .hdr companion is unusable
                    self._errors["hdr_file"] = self.error_class(
                        [".img file requires .hdr file"])
                    del cleaned_data["hdr_file"]
                    return cleaned_data

            # check if it is really nifti (.img/.hdr pair vs single file)
            try:
                print file_map  # NOTE(review): leftover debug print
                if "header" in file_map:
                    nii = nb.Nifti1Pair.from_file_map(file_map)
                else:
                    nii = nb.Nifti1Image.from_file_map(file_map)
            except Exception as e:
                # re-raised here; caught by the outer handler below which
                # turns it into a ValidationError (unless partial update)
                raise

            # detect AFNI 4D files and prepare 3D slices
            if nii is not None and detect_4D(nii):
                self.afni_subbricks = split_4D_to_3D(nii, tmp_dir=tmp_dir)
            else:
                # dimensions of extent > 1 (0/1-sized axes are squeezable)
                squeezable_dimensions = len(
                    filter(lambda a: a not in [0, 1], nii.shape))
                if squeezable_dimensions != 3:
                    self._errors["file"] = self.error_class([
                        "4D files are not supported.\n "
                        "If it's multiple maps in one "
                        "file please split them and "
                        "upload separately"])
                    del cleaned_data["file"]
                    return cleaned_data

                # convert to nii.gz if needed
                if (ext.lower() != ".nii.gz"
                        or squeezable_dimensions < len(nii.shape)):
                    # convert pseudo 4D to 3D
                    if squeezable_dimensions < len(nii.shape):
                        new_data = np.squeeze(nii.get_data())
                        nii = nb.Nifti1Image(new_data, nii.get_affine(),
                                             nii.get_header())

                    # Papaya does not handle float64, but by converting
                    # files we loose precision
                    # if nii.get_data_dtype() == np.float64:
                    # ii.set_data_dtype(np.float32)
                    new_name = fname + ".nii.gz"
                    nii_tmp = os.path.join(tmp_dir, new_name)
                    nb.save(nii, nii_tmp)

                    print "updating file in cleaned_data"
                    cleaned_data['file'] = memory_uploadfile(
                        nii_tmp, new_name, cleaned_data['file'])
        finally:
            try:
                if self.afni_subbricks:
                    # keep temp dir for AFNI slicing
                    self.afni_tmp = tmp_dir
                else:
                    print "removing %s" % tmp_dir
                    shutil.rmtree(tmp_dir)
            except OSError as exc:
                if exc.errno != 2:
                    # code 2 - no such file or directory
                    raise  # re-raise exception
    elif not getattr(self, 'partial', False):
        # Skip validation error if this is a partial update from the API
        raise ValidationError("Couldn't read uploaded file")
    return cleaned_data
def clean_and_validate(self, cleaned_data):
    """Validate and normalize an uploaded statistic-map file.

    Checks the file extension (.nii.gz / .nii / .img, with .img needing
    a matching .hdr), loads the volume with nibabel to prove it is a
    real NIfTI, slices AFNI 4D files into sub-bricks (stored on
    self.afni_subbricks, temp dir kept on self.afni_tmp), rejects other
    4D files, and converts/squeezes anything else to a .nii.gz stored
    back into cleaned_data['file']. On any validation failure the
    offending key is removed from cleaned_data and self._errors is set.
    """
    file = cleaned_data.get('file')
    if file:
        # check extension of the data file
        _, fname, ext = split_filename(file.name)
        if not ext.lower() in [".nii.gz", ".nii", ".img"]:
            self._errors["file"] = self.error_class(
                ["Doesn't have proper extension"]
            )
            del cleaned_data["file"]
            return cleaned_data

        # prepare file to loading into memory; transparently un-gzip
        file.open()
        fileobj = file.file
        if file.name.lower().endswith(".gz"):
            fileobj = GzipFile(filename=file.name, mode='rb',
                               fileobj=fileobj)

        file_map = {'image': nb.FileHolder(file.name, fileobj)}
        try:
            tmp_dir = tempfile.mkdtemp()
            if ext.lower() == ".img":
                hdr_file = cleaned_data.get('hdr_file')
                if hdr_file:
                    # check extension of the hdr file
                    _, _, hdr_ext = split_filename(hdr_file.name)
                    if not hdr_ext.lower() in [".hdr"]:
                        self._errors["hdr_file"] = self.error_class(
                            ["Doesn't have proper extension"])
                        del cleaned_data["hdr_file"]
                        return cleaned_data
                    else:
                        hdr_file.open()
                        file_map["header"] = nb.FileHolder(hdr_file.name,
                                                           hdr_file.file)
                else:
                    # .img without its .hdr companion is unusable
                    self._errors["hdr_file"] = self.error_class(
                        [".img file requires .hdr file"]
                    )
                    del cleaned_data["hdr_file"]
                    return cleaned_data

            # check if it is really nifti (.img/.hdr pair vs single file)
            try:
                print file_map  # NOTE(review): leftover debug print
                if "header" in file_map:
                    nii = nb.Nifti1Pair.from_file_map(file_map)
                else:
                    nii = nb.Nifti1Image.from_file_map(file_map)
            except Exception as e:
                # re-raised here; caught by the outer handler below which
                # turns it into a ValidationError (unless partial update)
                raise

            # detect AFNI 4D files and prepare 3D slices
            if nii is not None and detect_4D(nii):
                self.afni_subbricks = split_4D_to_3D(nii, tmp_dir=tmp_dir)
            else:
                # dimensions of extent > 1 (0/1-sized axes are squeezable)
                squeezable_dimensions = len(
                    filter(lambda a: a not in [0, 1], nii.shape)
                )
                if squeezable_dimensions != 3:
                    self._errors["file"] = self.error_class(
                        ["4D files are not supported.\n "
                         "If it's multiple maps in one "
                         "file please split them and "
                         "upload separately"])
                    del cleaned_data["file"]
                    return cleaned_data

                # convert to nii.gz if needed
                if (ext.lower() != ".nii.gz"
                        or squeezable_dimensions < len(nii.shape)):
                    # convert pseudo 4D to 3D
                    if squeezable_dimensions < len(nii.shape):
                        new_data = np.squeeze(nii.get_data())
                        nii = nb.Nifti1Image(new_data, nii.get_affine(),
                                             nii.get_header())

                    # Papaya does not handle float64, but by converting
                    # files we loose precision
                    # if nii.get_data_dtype() == np.float64:
                    # ii.set_data_dtype(np.float32)
                    new_name = fname + ".nii.gz"
                    nii_tmp = os.path.join(tmp_dir, new_name)
                    nb.save(nii, nii_tmp)

                    print "updating file in cleaned_data"
                    cleaned_data['file'] = memory_uploadfile(
                        nii_tmp, new_name, cleaned_data['file']
                    )
        finally:
            try:
                if self.afni_subbricks:
                    # keep temp dir for AFNI slicing
                    self.afni_tmp = tmp_dir
                else:
                    print "removing %s"%tmp_dir
                    shutil.rmtree(tmp_dir)
            except OSError as exc:
                if exc.errno != 2:
                    # code 2 - no such file or directory
                    raise  # re-raise exception
    elif not getattr(self, 'partial', False):
        # Skip validation error if this is a partial update from the API
        raise ValidationError("Couldn't read uploaded file")
    return cleaned_data
def upload_folder(request, collection_cid): collection = get_collection(collection_cid,request) allowed_extensions = ['.nii', '.img', '.nii.gz'] niftiFiles = [] if request.method == 'POST': print request.POST print request.FILES form = UploadFileForm(request.POST, request.FILES) if form.is_valid(): tmp_directory = tempfile.mkdtemp() print tmp_directory try: # Save archive (.zip or .tar.gz) to disk if "file" in request.FILES: archive_name = request.FILES['file'].name if fnmatch(archive_name,'*.nidm.zip'): form = populate_nidm_results(request,collection) if not form: messages.warning(request, "Invalid NIDM-Results file.") return HttpResponseRedirect(collection.get_absolute_url()) _, archive_ext = os.path.splitext(archive_name) if archive_ext == '.zip': compressed = zipfile.ZipFile(request.FILES['file']) elif archive_ext == '.gz': django_file = request.FILES['file'] django_file.open() compressed = tarfile.TarFile(fileobj=gzip.GzipFile(fileobj=django_file.file, mode='r'), mode='r') else: raise Exception("Unsupported archive type %s."%archive_name) compressed.extractall(path=tmp_directory) elif "file_input[]" in request.FILES: for f, path in zip(request.FILES.getlist( "file_input[]"), request.POST.getlist("paths[]")): if fnmatch(f.name,'*.nidm.zip'): request.FILES['file'] = f populate_nidm_results(request,collection) continue new_path, _ = os.path.split(os.path.join(tmp_directory, path)) mkdir_p(new_path) filename = os.path.join(new_path,f.name) tmp_file = open(filename, 'w') tmp_file.write(f.read()) tmp_file.close() else: raise Exception("Unable to find uploaded files.") atlases = {} for root, subdirs, filenames in os.walk(tmp_directory): if detect_feat_directory(root): populate_feat_directory(request,collection,root) del(subdirs) filenames = [] # .gfeat parent dir under cope*.feat should not be added as statmaps # this may be affected by future nidm-results_fsl parsing changes if root.endswith('.gfeat'): filenames = [] filenames = [f for f in filenames if not f[0] == 
'.'] for fname in sorted(filenames): name, ext = splitext_nii_gz(fname) nii_path = os.path.join(root, fname) if ext == '.xml': print "found xml" dom = minidom.parse(os.path.join(root, fname)) for atlas in dom.getElementsByTagName("summaryimagefile"): print "found atlas" path, base = os.path.split(atlas.lastChild.nodeValue) nifti_name = os.path.join(path, base) atlases[str(os.path.join(root, nifti_name[1:]))] = os.path.join(root, fname) if ext in allowed_extensions: nii = nib.load(nii_path) if detect_4D(nii): niftiFiles.extend(split_4D_to_3D(nii)) else: niftiFiles.append((fname,nii_path)) for label,fpath in niftiFiles: # Read nifti file information nii = nib.load(fpath) if len(nii.get_shape()) > 3 and nii.get_shape()[3] > 1: messages.warning(request, "Skipping %s - not a 3D file."%label) continue hdr = nii.get_header() raw_hdr = hdr.structarr # SPM only !!! # Check if filename corresponds to a T-map Tregexp = re.compile('spmT.*') # Fregexp = re.compile('spmF.*') if Tregexp.search(fpath) is not None: map_type = StatisticMap.T else: # Check if filename corresponds to a F-map if Tregexp.search(fpath) is not None: map_type = StatisticMap.F else: map_type = StatisticMap.OTHER path, name, ext = split_filename(fpath) dname = name + ".nii.gz" spaced_name = name.replace('_',' ').replace('-',' ') if ext.lower() != ".nii.gz": new_file_tmp_dir = tempfile.mkdtemp() new_file_tmp = os.path.join(new_file_tmp_dir, name) + '.nii.gz' nib.save(nii, new_file_tmp) f = ContentFile(open(new_file_tmp).read(), name=dname) shutil.rmtree(new_file_tmp_dir) label += " (old ext: %s)" % ext else: f = ContentFile(open(fpath).read(), name=dname) collection = get_collection(collection_cid,request) if os.path.join(path, name) in atlases: new_image = Atlas(name=spaced_name, description=raw_hdr['descrip'], collection=collection) new_image.label_description_file = ContentFile( open(atlases[os.path.join(path,name)]).read(), name=name + ".xml") else: new_image = StatisticMap(name=spaced_name, 
description=raw_hdr['descrip'] or label, collection=collection) new_image.map_type = map_type new_image.file = f new_image.save() except: error = traceback.format_exc().splitlines()[-1] msg = "An error occurred with this upload: {}".format(error) messages.warning(request, msg) return HttpResponseRedirect(collection.get_absolute_url()) finally: shutil.rmtree(tmp_directory) if not niftiFiles: messages.warning(request, "No NIFTI files (.nii, .nii.gz, .img/.hdr) found in the upload.") return HttpResponseRedirect(collection.get_absolute_url()) else: form = UploadFileForm() return render_to_response("statmaps/upload_folder.html", {'form': form}, RequestContext(request))