Example #1
    def save_afni_slices(self, commit):
        try:
            orig_img = self.instance

            for n, (label, brick) in enumerate(self.afni_subbricks):
                brick_fname = os.path.split(brick)[-1]
                mfile = memory_uploadfile(brick, brick_fname, orig_img.file)
                brick_img = StatisticMap(name='%s - %s' %
                                         (orig_img.name, label),
                                         collection=orig_img.collection,
                                         file=mfile)
                for field in set(self.Meta.fields) - set(
                    ['file', 'hdr_file', 'name', 'collection']):
                    if field in self.cleaned_data:
                        setattr(brick_img, field, self.cleaned_data[field])

                brick_img.save()
            return orig_img.collection

        finally:
            try:
                shutil.rmtree(self.afni_tmp)
            except OSError as exc:
                if exc.errno != 2:
                    raise
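This example (like the others below) funnels each sub-brick through memory_uploadfile before attaching it to a StatisticMap, but the helper itself never appears in the listing. A minimal sketch of what it might look like, assuming it does nothing more than wrap a file on disk in an in-memory Django upload object; the signature and the use of SimpleUploadedFile are assumptions, not the project's actual implementation.

from django.core.files.uploadedfile import SimpleUploadedFile


def memory_uploadfile(path, new_name, old_file):
    # Read the temporary file fully into memory so the on-disk copy can be
    # removed once the model instance has been saved.
    with open(path, 'rb') as fp:
        content = fp.read()

    # `old_file` mirrors the third argument used at the call sites above;
    # here it is only consulted for a content type, if it has one.
    content_type = getattr(old_file, 'content_type', 'application/octet-stream')
    return SimpleUploadedFile(new_name, content, content_type=content_type)

An object like this can be assigned directly to a model FileField, which is why the examples can build new images straight from temporary files.
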
Example #2
def populate_afni(image):
    try:
        orig_name = image.name
        tmpdir = tempfile.mkdtemp()
        bricks = split_4D_to_3D(image.file.path, tmp_dir=tmpdir)

        for label, brick in bricks:
            brick_fname = os.path.split(brick)[-1]
            mfile = memory_uploadfile(brick, brick_fname, image.file)
            brick_img = Image(name='%s - %s' % (orig_name, label), file=mfile)
            for field in ['collection', 'description', 'map_type', 'tags']:
                setattr(brick_img, field, getattr(image, field))

            if image.tags.exists():
                brick_img.save()  # generate PK before copying tags
                for tag in image.tags.all():
                    tagobj = ValueTaggedItem(content_object=brick_img, tag=tag)
                    tagobj.save()
            brick_img.save()

    finally:
        print 'converted afni4d %s to %s sub-brick images.' % (orig_name,
                                                               len(bricks))
        shutil.rmtree(tmpdir)
        os.remove(image.file.path)
        image.delete()
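populate_afni and the form methods both rely on split_4D_to_3D to turn one 4D volume into labelled 3D bricks on disk. A rough sketch built on nibabel's four_to_three, assuming the (label, path) return shape and the tmp_dir keyword implied by the call sites; the real helper may derive labels from AFNI sub-brick names instead of volume indices.

import os

import nibabel as nb


def split_4D_to_3D(image, tmp_dir):
    # Callers pass either a file path or an already-loaded nibabel image.
    img = image if hasattr(image, 'dataobj') else nb.load(image)

    bricks = []
    for i, vol in enumerate(nb.funcs.four_to_three(img)):
        out_path = os.path.join(tmp_dir, 'vol_%04d.nii.gz' % i)
        vol.to_filename(out_path)
        # Placeholder label; the real implementation may use sub-brick names.
        bricks.append(('vol %d' % i, out_path))
    return bricks
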
Example #3
    def save_afni_slices(self,form,commit):
        try:
            orig_img = form.instance
            first_img = None

            for n,(label,brick) in enumerate(form.afni_subbricks):
                brick_fname = os.path.split(brick)[-1]
                mfile = memory_uploadfile(brick, brick_fname, orig_img.file)
                brick_img = StatisticMap(name='%s - %s' % (orig_img.name, label), file=mfile)
                for field in ['collection','description']:
                    setattr(brick_img, field, form.cleaned_data[field])
                setattr(brick_img, 'map_type', form.data['%s-map_type' % form.prefix])

                if n == 0:
                    form.instance = brick_img
                    first_img = form.save()
                else:
                    brick_img.save()
                    for tag in first_img.tags.all():
                        tagobj = ValueTaggedItem(content_object=brick_img,tag=tag)
                        tagobj.save()

        finally:
            shutil.rmtree(form.afni_tmp)
        return form
Example #4
def populate_afni(image):
    try:
        orig_name = image.name
        tmpdir = tempfile.mkdtemp()
        bricks = split_4D_to_3D(image.file.path,tmp_dir=tmpdir)

        for label,brick in bricks:
            brick_fname = os.path.split(brick)[-1]
            mfile = memory_uploadfile(brick, brick_fname, image.file)
            brick_img = Image(name='%s - %s' % (orig_name, label), file=mfile)
            for field in ['collection','description','map_type','tags']:
                setattr(brick_img, field, getattr(image,field))

            if image.tags.exists():
                brick_img.save()  # generate PK before copying tags
                for tag in image.tags.all():
                    tagobj = ValueTaggedItem(content_object=brick_img,tag=tag)
                    tagobj.save()
            brick_img.save()

    finally:
        print 'converted afni4d %s to %s sub-brick images.' % (orig_name,len(bricks))
        shutil.rmtree(tmpdir)
        os.remove(image.file.path)
        image.delete()
Example #5
    def save_afni_slices(self,form,commit):
        try:
            orig_img = form.instance
            first_img = None

            for n,(label,brick) in enumerate(form.afni_subbricks):
                brick_fname = os.path.split(brick)[-1]
                mfile = memory_uploadfile(brick, brick_fname, orig_img.file)
                brick_img = StatisticMap(name='%s - %s' % (orig_img.name, label), file=mfile)
                for field in ['collection','description']:
                    setattr(brick_img, field, form.cleaned_data[field])
                setattr(brick_img, 'map_type', form.data['%s-map_type' % form.prefix])

                if n == 0:
                    form.instance = brick_img
                    first_img = form.save()
                else:
                    brick_img.save()
                    for tag in first_img.tags.all():
                        tagobj = ValueTaggedItem(content_object=brick_img,tag=tag)
                        tagobj.save()

        finally:
            shutil.rmtree(form.afni_tmp)
        return form
Example #6
    def save_afni_slices(self, commit):
        try:
            orig_img = self.instance

            for n, (label, brick) in enumerate(self.afni_subbricks):
                brick_fname = os.path.split(brick)[-1]
                mfile = memory_uploadfile(brick, brick_fname, orig_img.file)
                brick_img = StatisticMap(name='%s - %s' % (orig_img.name, label), collection=orig_img.collection,
                                         file=mfile)
                for field in set(self.Meta.fields) - set(['file', 'hdr_file', 'name', 'collection']):
                    if field in self.cleaned_data:
                        setattr(brick_img, field, self.cleaned_data[field])

                brick_img.save()
            return orig_img.collection

        finally:
            try:
                shutil.rmtree(self.afni_tmp)
            except OSError as exc:
                if exc.errno != 2:
                    raise
Example #7
    def clean_and_validate(self, cleaned_data):
        print "enter clean_and_validate"
        file = cleaned_data.get('file')
        surface_left_file = cleaned_data.get('surface_left_file')
        surface_right_file = cleaned_data.get('surface_right_file')

        if surface_left_file and surface_right_file and not file:
            if "file" in self._errors.keys():
                del self._errors["file"]
            cleaned_data["data_origin"] = 'surface'
            tmp_dir = tempfile.mkdtemp()
            try:
                new_name = cleaned_data["name"] + ".nii.gz"
                ribbon_projection_file = os.path.join(tmp_dir, new_name)

                inputs_dict = {"lh": "surface_left_file",
                               "rh": "surface_right_file"}
                intent_dict = {"lh": "CortexLeft",
                               "rh": "CortexRight"}

                for hemi in ["lh", "rh"]:
                    print hemi
                    surface_file = cleaned_data.get(inputs_dict[hemi])
                    _, ext = splitext_nii_gz(surface_file.name)

                    if not ext.lower() in [".mgh", ".curv", ".gii", ".nii", ".nii.gz"]:
                        self._errors[inputs_dict[hemi]] = self.error_class(
                            ["Doesn't have proper extension"]
                        )
                        del cleaned_data[inputs_dict[hemi]]
                        return cleaned_data

                    infile = os.path.join(tmp_dir, hemi + ext)

                    print "write " + hemi
                    print surface_file.file
                    surface_file.open()
                    surface_file = StringIO(surface_file.read())
                    with open(infile, 'w') as fd:
                        surface_file.seek(0)
                        shutil.copyfileobj(surface_file, fd)

                    try:
                        if ext.lower() != ".gii":
                            out_gii = os.path.join(tmp_dir, hemi + '.gii')
                            subprocess.check_output(
                                [os.path.join(os.environ['FREESURFER_HOME'],
                                              "bin", "mris_convert"),
                                 "-c", infile,
                                 os.path.join(os.environ['FREESURFER_HOME'],
                                              "subjects", "fsaverage", "surf",
                                              hemi + ".white"),
                                 out_gii])
                        else:
                            out_gii = infile

                        gii = nb.load(out_gii)

                        if gii.darrays[0].dims != [163842]:
                            self._errors[inputs_dict[hemi]] = self.error_class(
                                ["Doesn't have proper dimensions - are you sure it's fsaverage?"]
                            )
                            del cleaned_data[inputs_dict[hemi]]
                            return cleaned_data

                        # fix intent
                        old_dict = gii.meta.metadata
                        old_dict['AnatomicalStructurePrimary'] = intent_dict[hemi]
                        gii.meta = gii.meta.from_dict(old_dict)
                        gii.to_filename(os.path.join(tmp_dir, hemi + '.gii'))

                        subprocess.check_output(
                            [os.path.join(os.environ['FREESURFER_HOME'],
                                          "bin", "mri_surf2surf"),
                             "--s", "fsaverage",
                             "--hemi", hemi,
                             "--srcsurfval",
                             os.path.join(tmp_dir, hemi+'.gii'),
                             "--trgsubject", "ICBM2009c_asym_nlin",
                             "--trgsurfval",
                             os.path.join(tmp_dir, hemi+'.MNI.gii')])
                    except CalledProcessError as e:
                        raise RuntimeError(str(e.cmd) + " returned code " +
                                           str(e.returncode) + " with output " + e.output)

                cleaned_data['surface_left_file'] = memory_uploadfile(
                    os.path.join(tmp_dir, 'lh.gii'),
                    new_name[:-7] + ".fsaverage.lh.func.gii", None)
                cleaned_data['surface_right_file'] = memory_uploadfile(
                    os.path.join(tmp_dir, 'rh.gii'),
                    new_name[:-7] + ".fsaverage.rh.func.gii", None)
                print "surf2vol"
                try:
                    subprocess.check_output(
                        [os.path.join(os.environ['FREESURFER_HOME'],
                                      "bin", "mri_surf2vol"),
                         "--subject", "ICBM2009c_asym_nlin",
                         "--o",
                         ribbon_projection_file[:-3],
                         "--so",
                         os.path.join(os.environ['FREESURFER_HOME'],
                                      "subjects", "ICBM2009c_asym_nlin", "surf", "lh.white"),
                         os.path.join(tmp_dir, 'lh.MNI.gii'),
                         "--so",
                         os.path.join(os.environ['FREESURFER_HOME'],
                                      "subjects", "ICBM2009c_asym_nlin", "surf", "rh.white"),
                         os.path.join(tmp_dir, 'rh.MNI.gii')])
                except CalledProcessError as e:
                    raise RuntimeError(str(e.cmd) + " returned code " +
                                       str(e.returncode) + " with output " + e.output)

                # fix one voxel offset
                nii = nb.load(ribbon_projection_file[:-3])
                affine = nii.affine
                affine[0, 3] -= 1
                nb.Nifti1Image(nii.get_data(), affine).to_filename(ribbon_projection_file)

                cleaned_data['file'] = memory_uploadfile(
                    ribbon_projection_file, new_name, None)
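The surface branch above calls splitext_nii_gz to peel off the extension before deciding how to convert each hemisphere. A plausible sketch, assuming it behaves like os.path.splitext except that it keeps the compound .nii.gz extension together:

import os


def splitext_nii_gz(filename):
    base, ext = os.path.splitext(filename)
    if ext.lower() == '.gz':
        # Fold '.nii.gz' (or any '*.gz' double extension) into a single suffix.
        base, inner_ext = os.path.splitext(base)
        ext = inner_ext + ext
    return base, ext
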
Example #8
                            new_data = np.squeeze(nii.get_data())
                            nii = nb.Nifti1Image(new_data, nii.get_affine(),
                                                 nii.get_header())

                        # Papaya does not handle float64, but by converting
                        # files we lose precision
                        # if nii.get_data_dtype() == np.float64:
                        #     nii.set_data_dtype(np.float32)
                        new_name = fname + ".nii.gz"
                        nii_tmp = os.path.join(tmp_dir, new_name)
                        nb.save(nii, nii_tmp)

                        print "updating file in cleaned_data"

                        cleaned_data['file'] = memory_uploadfile(
                            nii_tmp, new_name, cleaned_data['file']
                        )
            finally:
                try:
                    if self.afni_subbricks:
                        # keep temp dir for AFNI slicing
                        self.afni_tmp = tmp_dir
                    else:
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        elif not getattr(self, 'partial', False):
            # Skip validation error if this is a partial update from the API
            raise ValidationError("Couldn't read uploaded file")
Example #9
    def clean_and_validate(self, cleaned_data):
        file = cleaned_data.get('file')

        if file:
            # check extension of the data file
            _, fname, ext = split_filename(file.name)
            if not ext.lower() in [".nii.gz", ".nii", ".img"]:
                self._errors["file"] = self.error_class(
                    ["Doesn't have proper extension"])
                del cleaned_data["file"]
                return cleaned_data

            # prepare file for loading into memory
            file.open()
            fileobj = file.file
            if file.name.lower().endswith(".gz"):
                fileobj = GzipFile(filename=file.name,
                                   mode='rb',
                                   fileobj=fileobj)

            file_map = {'image': nb.FileHolder(file.name, fileobj)}
            try:
                tmp_dir = tempfile.mkdtemp()
                if ext.lower() == ".img":
                    hdr_file = cleaned_data.get('hdr_file')
                    if hdr_file:
                        # check extension of the hdr file
                        _, _, hdr_ext = split_filename(hdr_file.name)
                        if not hdr_ext.lower() in [".hdr"]:
                            self._errors["hdr_file"] = self.error_class(
                                ["Doesn't have proper extension"])
                            del cleaned_data["hdr_file"]
                            return cleaned_data
                        else:
                            hdr_file.open()
                            file_map["header"] = nb.FileHolder(
                                hdr_file.name, hdr_file.file)
                    else:
                        self._errors["hdr_file"] = self.error_class(
                            [".img file requires .hdr file"])
                        del cleaned_data["hdr_file"]
                        return cleaned_data

                # check if it is really nifti
                try:
                    print file_map
                    if "header" in file_map:
                        nii = nb.Nifti1Pair.from_file_map(file_map)
                    else:
                        nii = nb.Nifti1Image.from_file_map(file_map)
                except Exception as e:
                    raise

                # detect AFNI 4D files and prepare 3D slices
                if nii is not None and detect_4D(nii):
                    self.afni_subbricks = split_4D_to_3D(nii, tmp_dir=tmp_dir)
                else:
                    squeezable_dimensions = len(
                        filter(lambda a: a not in [0, 1], nii.shape))

                    if squeezable_dimensions != 3:
                        self._errors["file"] = self.error_class([
                            "4D files are not supported.\n "
                            "If it's multiple maps in one "
                            "file please split them and "
                            "upload separately"
                        ])
                        del cleaned_data["file"]
                        return cleaned_data

                    # convert to nii.gz if needed
                    if (ext.lower() != ".nii.gz"
                            or squeezable_dimensions < len(nii.shape)):
                        # convert pseudo 4D to 3D
                        if squeezable_dimensions < len(nii.shape):
                            new_data = np.squeeze(nii.get_data())
                            nii = nb.Nifti1Image(new_data, nii.get_affine(),
                                                 nii.get_header())

                        # Papaya does not handle float64, but by converting
                        # files we lose precision
                        # if nii.get_data_dtype() == np.float64:
                        #     nii.set_data_dtype(np.float32)
                        new_name = fname + ".nii.gz"
                        nii_tmp = os.path.join(tmp_dir, new_name)
                        nb.save(nii, nii_tmp)

                        print "updating file in cleaned_data"

                        cleaned_data['file'] = memory_uploadfile(
                            nii_tmp, new_name, cleaned_data['file'])
            finally:
                try:
                    if self.afni_subbricks:
                        # keep temp dir for AFNI slicing
                        self.afni_tmp = tmp_dir
                    else:
                        print "removing %s" % tmp_dir
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        elif not getattr(self, 'partial', False):
            # Skip validation error if this is a partial update from the API
            raise ValidationError("Couldn't read uploaded file")

        return cleaned_data
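detect_4D is what routes an upload into the AFNI splitting branch. A minimal sketch under the assumption that it only looks for a non-singleton fourth dimension on the loaded image; the real helper may also inspect AFNI-specific header extensions.

def detect_4D(nii):
    # True when the image has a 4th dimension with more than one volume.
    shape = nii.shape
    return len(shape) > 3 and shape[3] > 1
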
Example #10
    def clean_and_validate(self, cleaned_data):
        file = cleaned_data.get('file')

        if file:
            # check extension of the data file
            _, fname, ext = split_filename(file.name)
            if not ext.lower() in [".nii.gz", ".nii", ".img"]:
                self._errors["file"] = self.error_class(
                    ["Doesn't have proper extension"]
                )
                del cleaned_data["file"]
                return cleaned_data

            # prepare file for loading into memory
            file.open()
            fileobj = file.file
            if file.name.lower().endswith(".gz"):
                fileobj = GzipFile(filename=file.name, mode='rb',
                                   fileobj=fileobj)

            file_map = {'image': nb.FileHolder(file.name, fileobj)}
            try:
                tmp_dir = tempfile.mkdtemp()
                if ext.lower() == ".img":
                    hdr_file = cleaned_data.get('hdr_file')
                    if hdr_file:
                        # check extension of the hdr file
                        _, _, hdr_ext = split_filename(hdr_file.name)
                        if not hdr_ext.lower() in [".hdr"]:
                            self._errors["hdr_file"] = self.error_class(
                                ["Doesn't have proper extension"])
                            del cleaned_data["hdr_file"]
                            return cleaned_data
                        else:
                            hdr_file.open()
                            file_map["header"] = nb.FileHolder(hdr_file.name,
                                                               hdr_file.file)
                    else:
                        self._errors["hdr_file"] = self.error_class(
                            [".img file requires .hdr file"]
                        )
                        del cleaned_data["hdr_file"]
                        return cleaned_data

                # check if it is really nifti
                try:
                    print file_map
                    if "header" in file_map:
                        nii = nb.Nifti1Pair.from_file_map(file_map)
                    else:
                        nii = nb.Nifti1Image.from_file_map(file_map)
                except Exception as e:
                    raise

                # detect AFNI 4D files and prepare 3D slices
                if nii is not None and detect_4D(nii):
                    self.afni_subbricks = split_4D_to_3D(nii, tmp_dir=tmp_dir)
                else:
                    squeezable_dimensions = len(
                        filter(lambda a: a not in [0, 1], nii.shape)
                    )

                    if squeezable_dimensions != 3:
                        self._errors["file"] = self.error_class(
                            ["4D files are not supported.\n "
                             "If it's multiple maps in one "
                             "file please split them and "
                             "upload separately"])
                        del cleaned_data["file"]
                        return cleaned_data

                    # convert to nii.gz if needed
                    if (ext.lower() != ".nii.gz"
                            or squeezable_dimensions < len(nii.shape)):
                        # convert pseudo 4D to 3D
                        if squeezable_dimensions < len(nii.shape):
                            new_data = np.squeeze(nii.get_data())
                            nii = nb.Nifti1Image(new_data, nii.get_affine(),
                                                 nii.get_header())

                        # Papaya does not handle float64, but by converting
                        # files we lose precision
                        # if nii.get_data_dtype() == np.float64:
                        #     nii.set_data_dtype(np.float32)
                        new_name = fname + ".nii.gz"
                        nii_tmp = os.path.join(tmp_dir, new_name)
                        nb.save(nii, nii_tmp)

                        print "updating file in cleaned_data"

                        cleaned_data['file'] = memory_uploadfile(
                            nii_tmp, new_name, cleaned_data['file']
                        )
            finally:
                try:
                    if self.afni_subbricks:
                        # keep temp dir for AFNI slicing
                        self.afni_tmp = tmp_dir
                    else:
                        print "removing %s"%tmp_dir
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        elif not getattr(self, 'partial', False):
            # Skip validation error if this is a partial update from the API
            raise ValidationError("Couldn't read uploaded file")

        return cleaned_data
Example #11
    def clean_and_validate(self, cleaned_data):
        print "enter clean_and_validate"
        file = cleaned_data.get('file')
        surface_left_file = cleaned_data.get('surface_left_file')
        surface_right_file = cleaned_data.get('surface_right_file')

        if surface_left_file and surface_right_file and not file:
            if "file" in self._errors.keys():
                del self._errors["file"]
            cleaned_data["data_origin"] = 'surface'
            tmp_dir = tempfile.mkdtemp()
            try:
                new_name = cleaned_data["name"] + ".nii.gz"
                ribbon_projection_file = os.path.join(tmp_dir, new_name)

                inputs_dict = {"lh": "surface_left_file",
                               "rh": "surface_right_file"}
                intent_dict = {"lh": "CortexLeft",
                               "rh": "CortexRight"}

                for hemi in ["lh", "rh"]:
                    print hemi
                    surface_file = cleaned_data.get(inputs_dict[hemi])
                    _, ext = splitext_nii_gz(surface_file.name)

                    if not ext.lower() in [".mgh", ".curv", ".gii", ".nii", ".nii.gz"]:
                        self._errors[inputs_dict[hemi]] = self.error_class(
                            ["Doesn't have proper extension"]
                        )
                        del cleaned_data[inputs_dict[hemi]]
                        return cleaned_data

                    infile = os.path.join(tmp_dir, hemi + ext)

                    print "write " + hemi
                    print surface_file.file
                    surface_file.open()
                    surface_file = StringIO(surface_file.read())
                    with open(infile, 'w') as fd:
                        surface_file.seek(0)
                        shutil.copyfileobj(surface_file, fd)

                    try:
                        if ext.lower() != ".gii":
                            out_gii = os.path.join(tmp_dir, hemi + '.gii')
                            subprocess.check_output(
                                [os.path.join(os.environ['FREESURFER_HOME'],
                                              "bin", "mris_convert"),
                                 "-c", infile,
                                 os.path.join(os.environ['FREESURFER_HOME'],
                                              "subjects", "fsaverage", "surf",
                                              hemi + ".white"),
                                 out_gii])
                        else:
                            out_gii = infile

                        gii = nb.load(out_gii)

                        if gii.darrays[0].dims != [163842]:
                            self._errors[inputs_dict[hemi]] = self.error_class(
                                ["Doesn't have proper dimensions - are you sure it's fsaverage?"]
                            )
                            del cleaned_data[inputs_dict[hemi]]
                            return cleaned_data

                        # fix intent
                        old_dict = gii.meta.metadata
                        old_dict['AnatomicalStructurePrimary'] = intent_dict[hemi]
                        gii.meta = gii.meta.from_dict(old_dict)
                        gii.to_filename(os.path.join(tmp_dir, hemi + '.gii'))

                        subprocess.check_output(
                            [os.path.join(os.environ['FREESURFER_HOME'],
                                          "bin", "mri_surf2surf"),
                             "--s", "fsaverage",
                             "--hemi", hemi,
                             "--srcsurfval",
                             os.path.join(tmp_dir, hemi+'.gii'),
                             "--trgsubject", "ICBM2009c_asym_nlin",
                             "--trgsurfval",
                             os.path.join(tmp_dir, hemi+'.MNI.gii')])
                    except CalledProcessError as e:
                        raise RuntimeError(str(e.cmd) + " returned code " +
                                           str(e.returncode) + " with output " + e.output)

                cleaned_data['surface_left_file'] = memory_uploadfile(
                    os.path.join(tmp_dir, 'lh.gii'),
                    new_name[:-7] + ".fsaverage.lh.func.gii", None)
                cleaned_data['surface_right_file'] = memory_uploadfile(
                    os.path.join(tmp_dir, 'rh.gii'),
                    new_name[:-7] + ".fsaverage.rh.func.gii", None)
                print "surf2vol"
                try:
                    subprocess.check_output(
                        [os.path.join(os.environ['FREESURFER_HOME'],
                                      "bin", "mri_surf2vol"),
                         "--subject", "ICBM2009c_asym_nlin",
                         "--o",
                         ribbon_projection_file[:-3],
                         "--so",
                         os.path.join(os.environ['FREESURFER_HOME'],
                                      "subjects", "ICBM2009c_asym_nlin", "surf", "lh.white"),
                         os.path.join(tmp_dir, 'lh.MNI.gii'),
                         "--so",
                         os.path.join(os.environ['FREESURFER_HOME'],
                                      "subjects", "ICBM2009c_asym_nlin", "surf", "rh.white"),
                         os.path.join(tmp_dir, 'rh.MNI.gii')])
                except CalledProcessError as e:
                    raise RuntimeError(str(e.cmd) + " returned code " +
                                       str(e.returncode) + " with output " + e.output)

                # fix one voxel offset
                nii = nb.load(ribbon_projection_file[:-3])
                affine = nii.affine
                affine[0, 3] -= 1
                nb.Nifti1Image(nii.get_data(), affine).to_filename(ribbon_projection_file)

                cleaned_data['file'] = memory_uploadfile(
                    ribbon_projection_file, new_name, None)
Example #12
                            new_data = np.squeeze(nii.get_data())
                            nii = nb.Nifti1Image(new_data, nii.get_affine(),
                                                 nii.get_header())

                        # Papaya does not handle float64, but by converting
                        # files we lose precision
                        # if nii.get_data_dtype() == np.float64:
                        #     nii.set_data_dtype(np.float32)
                        new_name = fname + ".nii.gz"
                        nii_tmp = os.path.join(tmp_dir, new_name)
                        nb.save(nii, nii_tmp)

                        print "updating file in cleaned_data"

                        cleaned_data['file'] = memory_uploadfile(
                            nii_tmp, new_name, cleaned_data['file']
                        )
            finally:
                try:
                    if self.afni_subbricks:
                        # keep temp dir for AFNI slicing
                        self.afni_tmp = tmp_dir
                    else:
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        elif not getattr(self, 'partial', False):
            # Skip validation error if this is a partial update from the API
            raise ValidationError("Couldn't read uploaded file")
Example #13
    def clean(self, **kwargs):

        cleaned_data = super(ImageForm, self).clean()
        file = cleaned_data.get("file")

        if file:
            # check extension of the data file
            _, fname, ext = split_filename(file.name)
            if not ext.lower() in [".nii.gz", ".nii", ".img"]:
                self._errors["file"] = self.error_class(["Doesn't have proper extension"])
                del cleaned_data["file"]
                return cleaned_data

            try:
                tmp_dir = tempfile.mkdtemp()
                if ext.lower() == ".img":
                    hdr_file = cleaned_data.get('hdr_file')
                    if hdr_file:

                        # check extension of the hdr file
                        _, _, hdr_ext = split_filename(hdr_file.name)
                        if not hdr_ext.lower() in [".hdr"]:
                            self._errors["hdr_file"] = self.error_class(
                                ["Doesn't have proper extension"])
                            del cleaned_data["hdr_file"]
                            return cleaned_data
                        else:
                            # write the header file to a temporary directory
                            hf = open(os.path.join(tmp_dir, fname + ".hdr"), "wb")
                            hf.write(hdr_file.file.read())
                            hf.close()
                    else:
                        self._errors["hdr_file"] = self.error_class([".img files require .hdr"])
                        del cleaned_data["hdr_file"]
                        return cleaned_data

                # write the data file to a temporary directory
                nii_tmp = os.path.join(tmp_dir, fname + ext)
                f = open(nii_tmp, "wb")
                f.write(file.file.read())
                f.close()

                # check if it is really nifti
                try:
                    nii = nb.load(nii_tmp)
                except Exception as e:
                    self._errors["file"] = self.error_class([str(e)])
                    del cleaned_data["file"]
                    return cleaned_data

                # convert to nii.gz if needed
                if ext.lower() != ".nii.gz":

                    # Papaya does not handle float64, but by converting files we lose precision
                    # if nii.get_data_dtype() == np.float64:
                    #     nii.set_data_dtype(np.float32)
                    new_name = fname + ".nii.gz"
                    nii_tmp = os.path.join(tmp_dir, new_name)
                    nb.save(nii, nii_tmp)

                    cleaned_data['file'] = memory_uploadfile(nii_tmp, new_name,
                                                             cleaned_data['file'])

                # detect AFNI 4D files and prepare 3D slices
                if nii_tmp is not None and detect_afni4D(nii_tmp):
                    self.afni_subbricks = split_afni4D_to_3D(nii_tmp)

            finally:
                try:
                    if self.afni_subbricks:
                        self.afni_tmp = tmp_dir  # keep temp dir for AFNI slicing
                    else:
                        shutil.rmtree(tmp_dir)
                except OSError as exc:
                    if exc.errno != 2:  # code 2 - no such file or directory
                        raise  # re-raise exception
        else:
            raise ValidationError("Couldn't read uploaded file")
        return cleaned_data
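Several of the form examples unpack three values from split_filename. A sketch that matches that usage, assuming it returns the directory, the base name, and the extension with .nii.gz kept whole; this mirrors the nipype helper of the same name, but the implementation used here is a guess.

import os


def split_filename(fname):
    pth, name = os.path.split(fname)
    if name.lower().endswith('.nii.gz'):
        # Keep the compound extension so the '.nii.gz' checks above work.
        return pth, name[:-7], name[-7:]
    base, ext = os.path.splitext(name)
    return pth, base, ext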