def main(args):
    '''Reads both meshes, finds associations between nodes from both meshes
    and writes:
    - a distance map between each pair of associated nodes
    - a difference map between texture values for each pair of nodes
    If n is provided, stops the process after n random nodes.'''
    import random
    import thickness as t
    # numpy and nibabel's gifti/giftiio are module-level imports in the
    # original source; added here so the snippet is self-contained
    import numpy as np
    from nibabel import gifti
    from nibabel.gifti import giftiio as gio

    m1 = t.Mesh(args.m1)
    m2 = t.Mesh(args.m2)
    t1 = gio.read(args.t1).darrays[0].data
    t2 = gio.read(args.t2).darrays[0].data

    diff = [-1] * len(m1.vertex)
    dist = [-1] * len(m1.vertex)

    if args.n:
        for i in xrange(int(args.n)):
            if i % 1000 == 0: print i, '/', int(args.n)
            # randrange excludes the upper bound, so the index stays in range
            # (randint(0, len) could go one past the last vertex)
            r = random.randrange(len(m1.vertex))
            diff[r], dist[r] = compare_node(r, m1, m2, t1, t2)
    else:
        for r in xrange(len(m1.vertex)):
            if r % 1000 == 0: print r, '/', len(m1.vertex)
            diff[r], dist[r] = compare_node(r, m1, m2, t1, t2)

    gda = gifti.GiftiDataArray.from_array(np.array(diff), intent=1001)  # 1001 = NIFTI_INTENT_ESTIMATE
    g = gifti.GiftiImage(darrays=[gda])
    gio.write(g, args.difffp)

    gda = gifti.GiftiDataArray.from_array(np.array(dist), intent=1001)  # 1001 = NIFTI_INTENT_ESTIMATE
    g = gifti.GiftiImage(darrays=[gda])
    gio.write(g, args.distfp)
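A minimal sketch of how main() might be driven from the command line, assuming an argparse front end whose attribute names match those used above (m1, m2, t1, t2, difffp, distfp, n); compare_node() is assumed to be defined elsewhere in the module.

if __name__ == '__main__':
    import argparse
    parser = argparse.ArgumentParser(description='compare two meshes and their textures')
    parser.add_argument('m1', help='first mesh (GIFTI)')
    parser.add_argument('m2', help='second mesh (GIFTI)')
    parser.add_argument('t1', help='texture on the first mesh (GIFTI)')
    parser.add_argument('t2', help='texture on the second mesh (GIFTI)')
    parser.add_argument('difffp', help='output GIFTI with texture differences')
    parser.add_argument('distfp', help='output GIFTI with node distances')
    parser.add_argument('-n', default=None, help='stop after n random nodes')
    main(parser.parse_args())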
Example #2
def test_read_deprecated():
    with clear_and_catch_warnings() as w:
        warnings.simplefilter('always', DeprecationWarning)
        from nibabel.gifti.giftiio import read, write

        img = read(DATA_FILE1)
        assert_equal(len(w), 1)
        with InTemporaryDirectory():
            write(img, 'test.gii')
        assert_equal(len(w), 2)
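For reference, the replacements these DeprecationWarnings point to are nibabel's generic load/save functions; a minimal sketch, with 'test.gii' as a placeholder path.

import nibabel as nib

img = nib.load('test.gii')   # replaces nibabel.gifti.giftiio.read
nib.save(img, 'test.gii')    # replaces nibabel.gifti.giftiio.write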
Example #4
def convert(inputfp, outputfp):
    # module-level imports in the original source, added so the snippet is self-contained
    import numpy as np
    from plyfile import PlyData
    from nibabel import gifti
    from nibabel.gifti import giftiio as gio

    print 'converting %s to %s' % (inputfp, outputfp)
    plydata = PlyData.read(inputfp)
    # vertices as an (n, 3) float array, faces as vertex-index triplets
    vertex = np.array(plydata.elements[0].data.tolist(), dtype=np.float32)
    faces = plydata.elements[1].data.tolist()
    faces = np.array([each[0] for each in faces], dtype=np.int32)
    gv = gifti.GiftiDataArray.from_array(vertex, intent=1008)  # 1008 = NIFTI_INTENT_POINTSET
    gf = gifti.GiftiDataArray.from_array(faces, intent=1009)   # 1009 = NIFTI_INTENT_TRIANGLE
    g = gifti.GiftiImage()
    g.add_gifti_data_array(gv)
    g.add_gifti_data_array(gf)
    gio.write(g, outputfp)
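A short usage sketch for convert(); both paths are placeholders and the plyfile package must be available.

if __name__ == '__main__':
    import sys
    # e.g.  python ply2gifti.py mesh.ply mesh.gii
    convert(sys.argv[1], sys.argv[2])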
Example #5
def test_gifti_dataset(fn, format_, include_nodes):
    expected_ds = _get_test_dataset(include_nodes)

    expected_ds_sa = expected_ds.copy(deep=True)
    expected_ds_sa.sa['chunks'] = [4, 3, 2, 1, 3, 2]
    expected_ds_sa.sa['targets'] = ['t%d' % i for i in xrange(6)]


    # build GIFTI file from scratch
    gifti_string = _build_gifti_string(format_, include_nodes)
    with open(fn, 'w') as f:
        f.write(gifti_string)

    # reading GIFTI file
    ds = gifti_dataset(fn)
    assert_datasets_almost_equal(ds, expected_ds)

    # test GiftiImage input
    img = nb_giftiio.read(fn)
    ds2 = gifti_dataset(img)
    assert_datasets_almost_equal(ds2, expected_ds)

    # test using Nibabel's output from write
    nb_giftiio.write(img, fn)
    ds3 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds3, expected_ds)

    # test targets and chunks arguments
    ds3_sa = gifti_dataset(fn, targets=expected_ds_sa.targets,
                           chunks=expected_ds_sa.chunks)
    assert_datasets_almost_equal(ds3_sa, expected_ds_sa)

    # test map2gifti
    img2 = map2gifti(ds)
    ds4 = gifti_dataset(img2)
    assert_datasets_almost_equal(ds4, expected_ds)

    map2gifti(ds, fn, encoding=format_)
    ds5 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds5, expected_ds)

    # test map2gifti with array input; nodes are not stored
    map2gifti(ds.samples, fn)
    ds6 = gifti_dataset(fn)
    if include_nodes:
        assert_raises(AssertionError, assert_datasets_almost_equal,
                      ds6, expected_ds)
    else:
        assert_datasets_almost_equal(ds6, expected_ds)

    assert_raises(TypeError, gifti_dataset, ds3_sa)
    assert_raises(TypeError, map2gifti, img, fn)
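A sketch of driving this parameterised test directly, outside whatever harness normally supplies fn, format_ and include_nodes; the three encoding names are the ones accepted by map2gifti below, and the temp-file handling here is illustrative only.

import os
import tempfile

for encoding in ('ASCII', 'Base64Binary', 'GZipBase64Binary'):
    for include_nodes in (False, True):
        handle, fn = tempfile.mkstemp(suffix='.gii')
        os.close(handle)
        try:
            test_gifti_dataset(fn, encoding, include_nodes)
        finally:
            os.remove(fn)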
Example #6
def test_gifti_dataset(fn, format_, include_nodes):
    expected_ds = _get_test_dataset(include_nodes)

    expected_ds_sa = expected_ds.copy(deep=True)
    expected_ds_sa.sa['chunks'] = [4, 3, 2, 1, 3, 2]
    expected_ds_sa.sa['targets'] = ['t%d' % i for i in xrange(6)]

    # build GIFTI file from scratch
    gifti_string = _build_gifti_string(format_, include_nodes)
    with open(fn, 'w') as f:
        f.write(gifti_string)

    # reading GIFTI file
    ds = gifti_dataset(fn)
    assert_datasets_almost_equal(ds, expected_ds)

    # test GiftiImage input
    img = nb_giftiio.read(fn)
    ds2 = gifti_dataset(img)
    assert_datasets_almost_equal(ds2, expected_ds)

    # test using Nibabel's output from write
    nb_giftiio.write(img, fn)
    ds3 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds3, expected_ds)

    # test targets and chunks arguments
    ds3_sa = gifti_dataset(fn, targets=expected_ds_sa.targets,
                           chunks=expected_ds_sa.chunks)
    assert_datasets_almost_equal(ds3_sa, expected_ds_sa)

    # test map2gifti
    img2 = map2gifti(ds)
    ds4 = gifti_dataset(img2)
    assert_datasets_almost_equal(ds4, expected_ds)

    # test float64 and int64, which must be converted to float32 and int32
    fa = dict()
    if include_nodes:
        fa['node_indices'] = ds.fa.node_indices.astype(np.int64)

    ds_float64 = Dataset(samples=ds.samples.astype(np.float64), fa=fa)
    ds_float64_again = gifti_dataset(map2gifti(ds_float64))
    assert_equal(ds_float64_again.samples.dtype, np.float32)
    if include_nodes:
        assert_equal(ds_float64_again.fa.node_indices.dtype, np.int32)


    # test contents of GIFTI image
    assert (isinstance(img2, nb_gifti.GiftiImage))
    nsamples = ds.samples.shape[0]
    if include_nodes:
        node_arr = img2.darrays[0]
        assert_equal(node_arr.intent,
                     intent_codes.code['NIFTI_INTENT_NODE_INDEX'])
        assert_equal(node_arr.coordsys, None)
        assert_equal(node_arr.data.dtype, np.int32)
        assert_equal(node_arr.datatype, data_type_codes['int32'])

        first_data_array_pos = 1
        narrays = nsamples + 1
    else:
        first_data_array_pos = 0
        narrays = nsamples

    assert_equal(len(img.darrays), narrays)
    for i in xrange(nsamples):
        arr = img2.darrays[i + first_data_array_pos]

        # check intent code
        illegal_intents = ['NIFTI_INTENT_NODE_INDEX',
                           'NIFTI_INTENT_GENMATRIX',
                           'NIFTI_INTENT_POINTSET',
                           'NIFTI_INTENT_TRIANGLE']
        assert (arr.intent not in [intent_codes.code[s]
                                   for s in illegal_intents])

        # although the GIFTI standard is not very clear about whether
        # arrays with an intent other than NODE_INDEX may have a
        # GiftiCoordSystem, FreeSurfer's mris_convert does not seem to
        # like its presence, so we make sure it is not there.

        assert_equal(arr.coordsys, None)
        assert_equal(arr.data.dtype, np.float32)
        assert_equal(arr.datatype, data_type_codes['float32'])



    # another test for map2gifti, setting the encoding explicitly
    map2gifti(ds, fn, encoding=format_)
    ds5 = gifti_dataset(fn)
    assert_datasets_almost_equal(ds5, expected_ds)

    # test map2gifti with array input; nodes are not stored
    map2gifti(ds.samples, fn)
    ds6 = gifti_dataset(fn)
    if include_nodes:
        assert_raises(AssertionError, assert_datasets_almost_equal,
                      ds6, expected_ds)
    else:
        assert_datasets_almost_equal(ds6, expected_ds)

    assert_raises(TypeError, gifti_dataset, ds3_sa)
    assert_raises(TypeError, map2gifti, img, fn)
Example #7
    # right-hemisphere accumulation (mirrors the left-hemisphere branch below)
    if os.path.isfile(file_pits_R):
        count_R += 1
        if pits_data_R.size == 0:
            pits_data_R = gio.read(file_pits_R).darrays[0].data
        else:
            pits_data_R += gio.read(file_pits_R).darrays[0].data
    if os.path.isfile(file_pits_L):
        count_L += 1
        if pits_data_L.size == 0:
            pits_data_L = gio.read(file_pits_L).darrays[0].data
        else:
            pits_data_L += gio.read(file_pits_L).darrays[0].data

pits_data_R_temp = pits_data_R / count_R
pits_data_L_temp = pits_data_L / count_L

total_density = (pits_data_R_temp + pits_data_L_temp) / 2
"""g_R = gio.read(file_pits_R)
g_L = gio.read(file_pits_L)
g_R.darrays[0].data = pits_data_R_temp
g_L.darrays[0].data = pits_data_L_temp
gio.write(g_R, OUTPUT+filename_R_gii)
gio.write(g_L, OUTPUT+filename_L_gii)"""

g_L_R = gio.read(file_pits_L)
g_L_R.darrays[0].data = total_density
gio.write(g_L_R, OUTPUT + filename_L_R_gii)
"""
database = 'databaseBV'
cohort = 'hcp'
OUTPUT = '/media/yl247234/SAMSUNG/'+cohort+'/'+database+'/pits_density/'
path = '/media/yl247234/SAMSUNG/'+cohort+'/'+database+'/'+cohort+'/'
temp_file_s_ids='/home/yl247234/s_ids_lists/s_ids.json'
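The fragment above accumulates per-vertex pit maps over subjects and then averages them; a self-contained sketch of that averaging step, assuming lists of smoothed pit-texture GIFTI files per hemisphere (the variable names below are placeholders).

import numpy as np
import nibabel.gifti.giftiio as gio

def mean_pits_density(gii_files):
    '''Average the first data array over a list of GIFTI texture files.'''
    acc, count = None, 0
    for fp in gii_files:
        data = gio.read(fp).darrays[0].data
        acc = data.copy() if acc is None else acc + data
        count += 1
    return acc / count

# density_R = mean_pits_density(right_hemisphere_files)  # placeholder lists
# density_L = mean_pits_density(left_hemisphere_files)
# total_density = (density_R + density_L) / 2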
Example #8
def map2gifti(ds, filename=None, encoding='GIFTI_ENCODING_B64GZ'):
    """Maps data(sets) into a GiftiImage, and optionally saves it to disc.

    Parameters
    ----------
    ds : AttrDataset or numpy.ndarray
      The data to be mapped
    filename : basestring or None, optional
      Filename to which the GiftiImage is stored
    encoding : "ASCII" or "Base64Binary" or "GZipBase64Binary", optional
      Encoding format of data

    Returns
    -------
    img : GiftiImage
      dataset contents represented in GiftiImage
    """

    darrays = []

    if isinstance(ds, np.ndarray):
        samples = ds
    elif isinstance(ds, AttrDataset):
        samples = ds.samples
        _warn_if_fmri_dataset(ds)
    else:
        raise TypeError('first argument must be AttrDataset or numpy.ndarray')

    [nsamples, nfeatures] = samples.shape

    def _get_attribute_value(ds, attr_name, keys_):
        if isinstance(ds, np.ndarray):
            # no attributes
            return None

        attr_collection = ds.__dict__.get(attr_name)

        if isinstance(keys_, basestring):
            keys_ = (keys_,)

        for key in keys_:
            if key in attr_collection:
                return attr_collection[key].value
        return None

    def _build_array(data, intent, encoding=encoding):
        return gifti.GiftiDataArray.from_array(data, intent,
                                               encoding=encoding)

    node_indices_labels = ('node_indices', 'center_ids', 'ids', 'roi_ids')
    node_indices = _get_attribute_value(ds, 'fa', node_indices_labels)

    if node_indices is not None:
        darray = _build_array(node_indices, 'NIFTI_INTENT_NODE_INDEX')
        darrays.append(darray)

    intents = _get_attribute_value(ds, 'sa', 'intents')
    for i, sample in enumerate(samples):
        intent = 'NIFTI_INTENT_NONE' if intents is None else intents[i]
        darray = _build_array(sample, intent)
        darrays.append(darray)

    image = gifti.GiftiImage(darrays=darrays)

    if filename is not None:
        giftiio.write(image, filename)

    return image
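A brief usage sketch for the map2gifti() defined above, assuming PyMVPA's Dataset is importable from mvpa2.datasets.base and using a placeholder output filename.

import numpy as np
from mvpa2.datasets.base import Dataset  # assumed import path

samples = np.random.randn(2, 6).astype(np.float32)         # 2 samples x 6 nodes
ds = Dataset(samples, fa=dict(node_indices=np.arange(6)))   # node indices as a feature attribute
img = map2gifti(ds, filename='samples.gii', encoding='GZipBase64Binary')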
Example #9
def map2gifti(ds, filename=None, encoding='GIFTI_ENCODING_B64GZ'):
    """Maps data(sets) into a GiftiImage, and optionally saves it to disc.

    Parameters
    ----------
    ds : AttrDataset or numpy.ndarray
      The data to be mapped
    filename : basestring or None, optional
      Filename to which the GiftiImage is stored
    encoding : "ASCII" or "Base64Binary" or "GZipBase64Binary", optional
      Encoding format of data

    Returns
    -------
    img : GiftiImage
      dataset contents represented in GiftiImage
    """

    darrays = []

    if isinstance(ds, np.ndarray):
        samples = ds
    elif isinstance(ds, AttrDataset):
        samples = ds.samples
        _warn_if_fmri_dataset(ds)
    else:
        raise TypeError('first argument must be AttrDataset or numpy.ndarray')

    [nsamples, nfeatures] = samples.shape

    def _get_attribute_value(ds, attr_name, keys_):
        if isinstance(ds, np.ndarray):
            # no attributes
            return None

        attr_collection = ds.__dict__.get(attr_name)

        if isinstance(keys_, basestring):
            keys_ = (keys_, )

        for key in keys_:
            if key in attr_collection:
                return attr_collection[key].value
        return None

    def _build_array(data, intent, encoding=encoding):
        return gifti.GiftiDataArray.from_array(data, intent, encoding=encoding)

    node_indices_labels = ('node_indices', 'center_ids', 'ids', 'roi_ids')
    node_indices = _get_attribute_value(ds, 'fa', node_indices_labels)

    if node_indices is not None:
        darray = _build_array(node_indices, 'NIFTI_INTENT_NODE_INDEX')
        darrays.append(darray)

    intents = _get_attribute_value(ds, 'sa', 'intents')
    for i, sample in enumerate(samples):
        intent = 'NIFTI_INTENT_NONE' if intents is None else intents[i]
        darray = _build_array(sample, intent)
        darrays.append(darray)

    image = gifti.GiftiImage(darrays=darrays)

    if filename is not None:
        giftiio.write(image, filename)

    return image
Example #10
def map2gifti(ds, filename=None, encoding='GIFTI_ENCODING_B64GZ',
              surface=None):
    """Maps data(sets) into a GiftiImage, and optionally saves it to disc.

    Parameters
    ----------
    ds : AttrDataset or numpy.ndarray
      The data to be mapped
    filename : basestring or None, optional
      Filename to which the GiftiImage is stored
    encoding : "ASCII" or "Base64Binary" or "GZipBase64Binary", optional
      Encoding format of data
    surface : mvpa2.surf.nibabel.surf.Surface or str, optional
      Optional anatomical Surface object, or filename of anatomical surface
      file, to be stored together with the data. This should allow
      FreeSurfer's mris_convert to read files written by this function

    Returns
    -------
    img : GiftiImage
      dataset contents represented in GiftiImage
    """

    darrays = []

    if isinstance(ds, np.ndarray):
        samples = ds
    elif isinstance(ds, AttrDataset):
        samples = ds.samples
        _warn_if_fmri_dataset(ds)
    else:
        raise TypeError('first argument must be AttrDataset or numpy.ndarray')

    [nsamples, nfeatures] = samples.shape

    def _get_attribute_value(ds, attr_name, keys_):
        if isinstance(ds, np.ndarray):
            # no attributes
            return None

        attr_collection = ds.__dict__.get(attr_name)

        if isinstance(keys_, basestring):
            keys_ = (keys_,)

        for key in keys_:
            if key in attr_collection:
                return attr_collection[key].value
        return None

    def _build_array(data, intent, encoding=encoding):
        is_integer = intent == 'NIFTI_INTENT_NODE_INDEX'
        dtype = np.int32 if is_integer else np.float32

        arr = gifti.GiftiDataArray.from_array(data.astype(dtype), intent,
                                              encoding=encoding)
        # Setting the coordsys argument in the constructor would set the
        # matrix to the 4x4 identity matrix, which is not desired. Instead
        # the coordsys is explicitly set to None afterwards.
        arr.coordsys = None

        return arr

    node_indices_labels = ('node_indices', 'center_ids', 'ids', 'roi_ids')
    node_indices = _get_attribute_value(ds, 'fa', node_indices_labels)

    if node_indices is not None:
        darray = _build_array(node_indices, 'NIFTI_INTENT_NODE_INDEX')
        darrays.append(darray)

    intents = _get_attribute_value(ds, 'sa', 'intents')
    for i, sample in enumerate(samples):
        intent = 'NIFTI_INTENT_NONE' if intents is None else intents[i]
        darray = _build_array(sample, intent)
        darrays.append(darray)

    # if there is a surface, add it
    if surface is not None:
        surface_object = surf_from_any(surface)
        anat_image = anat_surf_to_gifti_image(surface_object, add_indices=False)

        for darray in anat_image.darrays:
            darrays.append(darray)

    image = gifti.GiftiImage(darrays=darrays)

    if filename is not None:
        giftiio.write(image, filename)

    return image
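This variant can additionally embed an anatomical surface so that tools such as FreeSurfer's mris_convert accept the output; a hedged usage sketch, with both filenames as placeholders and ds an AttrDataset of per-node values.

# write functional values together with the anatomical surface geometry
img = map2gifti(ds, filename='func_left.gii', surface='pial_left.gii')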
Example #11
            if os.path.isfile(pits_R):
                count_R += 1
                if pits_data_R.size == 0:
                    pits_data_R = gio.read(pits_R).darrays[0].data
                else:
                    pits_data_R += gio.read(pits_R).darrays[0].data
            else:
                print k
                print s_id
            if os.path.isfile(pits_L):
                count_L += 1
                if pits_data_L.size == 0:
                    pits_data_L = gio.read(pits_L).darrays[0].data
                else:
                    pits_data_L += gio.read(pits_L).darrays[0].data

        pits_data_R_temp = pits_data_R / count_R
        pits_data_L_temp = pits_data_L / count_L

        total_density = (pits_data_R_temp + pits_data_L_temp) / 2
        g_L_R = gio.read(pits_L)
        g_L_R.darrays[0].data = total_density
        gio.write(g_L_R, fname_LR)

        g_R = gio.read(pits_L)
        g_L = gio.read(pits_L)
        g_R.darrays[0].data = pits_data_R_temp
        g_L.darrays[0].data = pits_data_L_temp
        gio.write(g_R, fname_R)
        gio.write(g_L, fname_L)
    for subject in os.listdir(path_c):
        path_s = path_c+subject+'/'
        if os.path.isdir(path_s):
            file_pits_R = path_s + 't1mri/BL/default_analysis/segmentation/mesh/surface_analysis/'+subject+'_Rwhite_pits_smoothed_on_atlas.gii'
            file_pits_L = path_s + 't1mri/BL/default_analysis/segmentation/mesh/surface_analysis/'+subject+'_Lwhite_pits_smoothed_on_atlas.gii'
            if os.path.isfile(file_pits_R):
                count_R += 1
                if pits_data_R.size == 0:
                    pits_data_R = gio.read(file_pits_R).darrays[0].data
                else:
                    pits_data_R += gio.read(file_pits_R).darrays[0].data
            if os.path.isfile(file_pits_L):
                count_L += 1
                if pits_data_L.size == 0:
                    pits_data_L = gio.read(file_pits_L).darrays[0].data
                else:
                    pits_data_L += gio.read(file_pits_L).darrays[0].data

pits_data_R = pits_data_R/count_R
pits_data_L = pits_data_L/count_L


g_R = gio.read(file_pits_R)
g_L = gio.read(file_pits_L)
g_R.darrays[0].data = pits_data_R
g_L.darrays[0].data = pits_data_L
np.savetxt(OUTPUT+filename_R, pits_data_R)
np.savetxt(OUTPUT+filename_L, pits_data_L)
gio.write(g_R, OUTPUT+filename_R_gii)
gio.write(g_L, OUTPUT+filename_L_gii)
Example #14
def map2gifti(ds,
              filename=None,
              encoding='GIFTI_ENCODING_B64GZ',
              surface=None):
    """Maps data(sets) into a GiftiImage, and optionally saves it to disc.

    Parameters
    ----------
    ds : AttrDataset or numpy.ndarray
      The data to be mapped
    filename : basestring or None, optional
      Filename to which the GiftiImage is stored
    encoding : "ASCII" or "Base64Binary" or "GZipBase64Binary", optional
      Encoding format of data
    surface : mvpa2.surf.nibabel.surf.Surface or str, optional
      Optional anatomical Surface object, or filename of anatomical surface
      file, to be stored together with the data. This should allow
      FreeSurfer's mris_convert to read files written by this function

    Returns
    -------
    img : GiftiImage
      dataset contents represented in GiftiImage
    """

    darrays = []

    if isinstance(ds, np.ndarray):
        samples = ds
    elif isinstance(ds, AttrDataset):
        samples = ds.samples
        _warn_if_fmri_dataset(ds)
    else:
        raise TypeError('first argument must be AttrDataset or numpy.ndarray')

    [nsamples, nfeatures] = samples.shape

    def _get_attribute_value(ds, attr_name, keys_):
        if isinstance(ds, np.ndarray):
            # no attributes
            return None

        attr_collection = ds.__dict__.get(attr_name)

        if isinstance(keys_, basestring):
            keys_ = (keys_, )

        for key in keys_:
            if key in attr_collection:
                return attr_collection[key].value
        return None

    def _build_array(data, intent, encoding=encoding):
        is_integer = intent == 'NIFTI_INTENT_NODE_INDEX'
        dtype = np.int32 if is_integer else np.float32

        arr = gifti.GiftiDataArray.from_array(data.astype(dtype),
                                              intent,
                                              encoding=encoding)
        # Setting the coordsys argument in the constructor would set the
        # matrix to the 4x4 identity matrix, which is not desired. Instead
        # the coordsys is explicitly set to None afterwards.
        arr.coordsys = None

        return arr

    node_indices_labels = ('node_indices', 'center_ids', 'ids', 'roi_ids')
    node_indices = _get_attribute_value(ds, 'fa', node_indices_labels)

    if node_indices is not None:
        darray = _build_array(node_indices, 'NIFTI_INTENT_NODE_INDEX')
        darrays.append(darray)

    intents = _get_attribute_value(ds, 'sa', 'intents')
    for i, sample in enumerate(samples):
        intent = 'NIFTI_INTENT_NONE' if intents is None else intents[i]
        darray = _build_array(sample, intent)
        darrays.append(darray)

    # if there is a surface, add it
    if surface is not None:
        surface_object = surf_from_any(surface)
        anat_image = anat_surf_to_gifti_image(surface_object,
                                              add_indices=False)

        for darray in anat_image.darrays:
            darrays.append(darray)

    image = gifti.GiftiImage(darrays=darrays)

    if filename is not None:
        giftiio.write(image, filename)

    return image