def run_gq_sims(sample_data=[35]):
    """Voxel-by-voxel GQS fits on selected simulation files (default: sample 35 only).

    NB: shadowed by the fuller run_gq_sims defined further down, which loops
    over many simulation files and also fits the tensor model.
    """

    for simfile in [simdata[sample] for sample in sample_data]:

        dataname = simfile
        print dataname

        sim_data = np.loadtxt(simdir + dataname)

        marta_table_fname = '/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt'
        b_vals_dirs = np.loadtxt(marta_table_fname)
        bvals = b_vals_dirs[:, 0] * 1000
        gradients = b_vals_dirs[:, 1:]

        # ten consecutive rows starting at row 10000, one voxel at a time
        for j in range(10):

            s = sim_data[10000 + j, :]

            # each row holds 102 diffusion measurements; GeneralizedQSampling
            # expects a (voxels, measurements) array
            gqs = dp.GeneralizedQSampling(s.reshape((1, 102)),
                                          bvals,
                                          gradients,
                                          Lambda=7)

            t0, t1, t2, npa = gqs.npa(s, width=5)

            print t0, t1, t2, npa
            """
def gq_tn_calc_save():
    """Fit GQS and tensor models to every simulation file and pickle the fits."""

    for simfile in simdata:

        dataname = simfile
        print dataname

        sim_data = np.loadtxt(simdir + dataname)

        marta_table_fname = '/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt'
        b_vals_dirs = np.loadtxt(marta_table_fname)
        bvals = b_vals_dirs[:, 0] * 1000
        gradients = b_vals_dirs[:, 1:]

        # fit generalized q-sampling on the whole file and pickle the result
        gq = dp.GeneralizedQSampling(sim_data, bvals, gradients)
        gqfile = simdir + 'gq/' + dataname + '.pkl'
        pkl.save_pickle(gqfile, gq)

        """
        gq.IN               gq.__doc__          gq.glob_norm_param
        gq.QA               gq.__init__         gq.odf              
        gq.__class__        gq.__module__       gq.q2odf_params
        """

        # fit the single-tensor model on the same data and pickle it too
        tn = dp.Tensor(sim_data, bvals, gradients)
        tnfile = simdir + 'tn/' + dataname + '.pkl'
        pkl.save_pickle(tnfile, tn)

        """
        tn.ADC               tn.__init__          tn._getevals
        tn.B                 tn.__module__        tn._getevecs
        tn.D                 tn.__new__           tn._getndim
        tn.FA                tn.__reduce__        tn._getshape
        tn.IN                tn.__reduce_ex__     tn._setevals
        tn.MD                tn.__repr__          tn._setevecs
        tn.__class__         tn.__setattr__       tn.adc
        tn.__delattr__       tn.__sizeof__        tn.evals
        tn.__dict__          tn.__str__           tn.evecs
        tn.__doc__           tn.__subclasshook__  tn.fa
        tn.__format__        tn.__weakref__       tn.md
        tn.__getattribute__  tn._evals            tn.ndim
        tn.__getitem__       tn._evecs            tn.shape
        tn.__hash__          tn._getD             
        """

        """ file  has one row for every voxel, every voxel is repeating 1000
def run_gq_sims(sample_data=[35, 23, 46, 39, 40, 10, 37, 27, 21, 20]):
    """For the selected simulation files, fit GQS and tensor models one voxel
    at a time and write npa and FA values to a text file."""

    # results = []

    out = open('/home/ian/Data/SimVoxels/Out/' + 'npa+fa', 'w')

    for sample in sample_data:

        simfile = simdata[sample]

        dataname = simfile
        print dataname

        sim_data = np.loadtxt(simdir + dataname)

        marta_table_fname = '/home/ian/Data/SimData/Dir_and_bvals_DSI_marta.txt'
        b_vals_dirs = np.loadtxt(marta_table_fname)
        bvals = b_vals_dirs[:, 0] * 1000
        gradients = b_vals_dirs[:, 1:]

        # visit the first two of the 1000 repeats of each of the 100 voxel
        # configurations: rows 0,1,1000,1001,2000,2001,...
        for j in np.vstack(
            (np.arange(100) * 1000, np.arange(100) * 1000 + 1)).T.ravel():

            s = sim_data[j, :]

            gqs = dp.GeneralizedQSampling(s.reshape((1, 102)),
                                          bvals,
                                          gradients,
                                          Lambda=3.5)
            tn = dp.Tensor(s.reshape((1, 102)),
                           bvals,
                           gradients,
                           fit_method='LS')

            t0, t1, t2, npa = gqs.npa(s, width=5)

            print >> out, dataname, j, npa, tn.fa()[0]
            """
            for (i,o) in enumerate(gqs.odf(s)):
                print i,o
            
            for (i,o) in enumerate(gqs.odf_vertices):
                print i,o
            """
            #o = gqs.odf(s)
            #v = gqs.odf_vertices
            #pole = v[t0[0]]
            #eqv = dgqs.equatorial_zone_vertices(v, pole, 5)
            #print 'Number of equatorial vertices: ', len(eqv)
            #print np.max(o[eqv]),np.min(o[eqv])
            #cos_e_pole = [np.dot(pole.T, v[i]) for i in eqv]
            #print np.min(cos_e_pole), np.max(cos_e_pole)
            #print 'equatorial max in equatorial vertices:', t1[0] in eqv
            #x =  np.cross(v[t0[0]],v[t1[0]])
            #x = x/np.sqrt(np.sum(x**2))
            #print x
            #ptchv = dgqs.patch_vertices(v, x, 5)
            #print len(ptchv)
            #eqp = eqv[np.argmin([np.abs(np.dot(v[t1[0]].T,v[p])) for p in eqv])]
            #print (eqp, o[eqp])
            #print t2[0] in ptchv, t2[0] in eqv
            #print np.dot(pole.T, v[t1[0]]), np.dot(pole.T, v[t2[0]])
            #print ptchv[np.argmin([o[v] for v in ptchv])]

            #gq_indices = np.array(gq.IN[:,0],dtype='int').reshape((100,1000))

            #gq_first_directions_in=odf_vertices[np.array(gq.IN[:,0],dtype='int')]

            #print gq_first_directions_in.shape

            #gq_results = analyze_maxima(gq_indices, gq_first_directions_in.reshape((100,1000,3)),range(100))

            #for gqi see example dicoms_2_tracks gq.IN[:,0]

            #np.set_printoptions(precision=6, suppress=True, linewidth=200, threshold=5000)

            #out = open('/home/ian/Data/SimVoxels/Out/'+'+++_'+dataname,'w')

            #results = np.hstack((np.vstack(dt_results), np.vstack(gq_results)))
            #results = np.vstack(dt_results)

            #print >> out, results[:,:]

    out.close()
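
# Illustrative helper, not part of the original script: build the interleaved
# index pattern used in run_gq_sims -- the first `keep` repeats of each of
# `n_voxels` configurations stored back-to-back in blocks of `block` rows.
# With the defaults this reproduces 0,1,1000,1001,2000,2001,...
def _interleaved_indices(n_voxels=100, block=1000, keep=2):
    cols = [np.arange(n_voxels) * block + k for k in range(keep)]
    return np.vstack(cols).T.ravel()
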
# NB: fname is assumed to be set earlier in the script; the commented path
# below is one candidate results file.
#fname='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/results_SNR030_isotropic'
marta_table_fname = '/home/eg01/Data_Backup/Data/Marta/DSI/SimData/Dir_and_bvals_DSI_marta.txt'
sim_data = np.loadtxt(fname)
#bvalsf='/home/eg01/Data_Backup/Data/Marta/DSI/SimData/bvals101D_float.txt'
dname = '/home/eg01/Data_Backup/Data/Frank_Eleftherios/frank/20100511_m030y_cbu100624/08_ep2d_advdiff_101dir_DSI'

#real_data,affine,bvals,gradients=dp.load_dcm_dir(dname)

b_vals_dirs = np.loadtxt(marta_table_fname)

bvals = b_vals_dirs[:, 0] * 1000
gradients = b_vals_dirs[:, 1:]

# fit GQS and tensor models to the full simulated data set
gq = dp.GeneralizedQSampling(sim_data, bvals, gradients)
tn = dp.Tensor(sim_data, bvals, gradients)

# eigenvalues and eigenvectors of the first voxel's tensor fit
evals = tn.evals[0]
evecs = tn.evecs[0]

# pyglet scene setup; setup() is assumed to be defined earlier in the script
setup()
batch = pyglet.graphics.Batch()

# evenly distributed unit sphere: 362 vertices plus the triangles joining them
eds = np.load(
    '/home/eg01/Devel/dipy/dipy/core/matrices/evenly_distributed_sphere_362.npz'
)

vertices = eds['vertices']
faces = eds['faces']
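
# Illustrative only (mirrors the commented-out exploration in run_gq_sims):
# evaluate the ODF of the first simulated voxel and pair a few values with
# their sampling directions.
odf0 = gq.odf(sim_data[0])
dirs = gq.odf_vertices
for i in range(3):
    print dirs[i], odf0[i]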