示例#1
0
def test_round_trip():
    # Streamlines written with tv.write should read back unchanged.
    def column_stream(offset):
        # 5x3 array whose rows are [k, k, k] for k = offset..offset+4.
        return np.tile(np.arange(5).reshape(5, 1) + offset, (1, 3))

    fileobj = StringIO()
    streams_in = [(column_stream(0), None, None),
                  (column_stream(10), None, None)]
    tv.write(fileobj, streams_in, {})
    fileobj.seek(0)
    streams_out, hdr = tv.read(fileobj)
    yield assert_true, streamlist_equal(streams_in, streams_out)
示例#2
0
def test_round_trip():
    # Write two streamlines, read them back, and check equality.
    buf = StringIO()
    base = np.tile(np.arange(5).reshape(5, 1), (1, 3))
    shifted = np.tile(np.arange(5).reshape(5, 1) + 10, (1, 3))
    originals = [
        (base, None, None),
        (shifted, None, None),
    ]
    tv.write(buf, originals, {})
    buf.seek(0)
    restored, hdr = tv.read(buf)
    yield assert_true, streamlist_equal(originals, restored)
示例#3
0
def test_write():
    # Exercise tv.write header handling with an empty stream list.
    streams = []
    fobj = StringIO()
    tv.write(fobj, [], {})
    # A default header must serialize to the canonical empty header bytes.
    yield assert_equal, fobj.getvalue(), tv.empty_header().tostring()
    fobj.truncate(0)
    # A non-default id_string should survive a write/read cycle.
    tv.write(fobj, [], {'id_string':'TRACKb'})
    fobj.seek(0)
    streams, hdr = tv.read(fobj)
    yield assert_equal, hdr['id_string'], 'TRACKb'
    # The header argument may be omitted entirely or passed as None.
    fobj.truncate(0)
    tv.write(fobj, [])
    fobj.truncate(0)
    tv.write(fobj, [], None)
    # Invalid header field values must raise HeaderError.
    fobj.truncate(0)
    for bad_hdr in ({'id_string':'not OK'},
                    {'version':2},
                    {'hdr_size':0}):
        yield assert_raises, tv.HeaderError, tv.write, fobj, [], bad_hdr
示例#4
0
def test_write():
    """Check tv.write header handling on an empty stream list."""
    streams = []
    buf = StringIO()
    tv.write(buf, [], {})
    # Default header serializes to the canonical empty header bytes.
    yield assert_equal(buf.getvalue(), tv.empty_header().tostring())
    buf.truncate(0)
    # A custom id_string round-trips through write/read.
    tv.write(buf, [], {'id_string': 'TRACKb'})
    buf.seek(0)
    streams, hdr = tv.read(buf)
    yield assert_equal(hdr['id_string'], 'TRACKb')
    # The header argument is optional and may also be None.
    buf.truncate(0)
    tv.write(buf, [])
    buf.truncate(0)
    tv.write(buf, [], None)
    # Bad header values must be rejected with HeaderError.
    buf.truncate(0)
    bad_headers = ({'id_string': 'not OK'}, {'version': 3}, {'hdr_size': 0})
    for bad in bad_headers:
        yield assert_raises(tv.HeaderError, tv.write, buf, [], bad)
# Drop the raw streamline list; `hdr` is deliberately kept (commented out
# of the del) because tv.write below still needs it.
# NOTE(review): `streams` and `hdr` must come from an earlier, unseen part
# of this script — confirm.
del streams  #,hdr

# Run LARCH clustering only when no cached result exists on disk;
# otherwise load the pickled clustering.
if not os.path.isfile(C_fname):

    print 'Starting LARCH ...'
    tim = time.clock()
    # Cluster at three squared-distance thresholds (50^2, 20^2, 5^2).
    # The two boolean flags are positional — their meaning is not visible
    # from here; see tl.larch's signature.
    C, atracks = tl.larch(tracks, [50.**2, 20.**2, 5.**2], True, True)
    #tracks=[tm.downsample(t,3) for t in tracks]
    #C=pf.local_skeleton_clustering(tracks,20.)
    print 'Done in total of ', time.clock() - tim, 'seconds.'

    print 'Saving result...'
    # Cache the clustering so the next run takes the else branch.
    pkl.save_pickle(C_fname, C)

    # Persist the approximated tracks as (points, None, None) streams.
    streams = [(i, None, None) for i in atracks]
    tv.write(appr_fname, streams, hdr)

else:

    print 'Loading result...'
    C = pkl.load_pickle(C_fname)

# Collect one representative track ('repz') per cluster as the skeleton.
skel = []
for c in C:

    skel.append(C[c]['repz'])

print 'Showing dataset after clustering...'
# Set up the renderer and a color slot per skeleton track.
r = fos.ren()
fos.clear(r)
colors = np.zeros((len(skel), 3))
del streams#,hdr

if not os.path.isfile(C_fname):

    print 'Starting LARCH ...'
    tim=time.clock()
    C,atracks=tl.larch(tracks,[50.**2,20.**2,5.**2],True,True)
    #tracks=[tm.downsample(t,3) for t in tracks]
    #C=pf.local_skeleton_clustering(tracks,20.)
    print 'Done in total of ',time.clock()-tim,'seconds.'

    print 'Saving result...'
    pkl.save_pickle(C_fname,C)    
    
    streams=[(i,None,None)for i in atracks]
    tv.write(appr_fname,streams,hdr)

else:

    print 'Loading result...'
    C=pkl.load_pickle(C_fname)

skel=[]
for c in C:

    skel.append(C[c]['repz'])
    
print 'Showing dataset after clustering...'
r=fos.ren()
fos.clear(r)
colors=np.zeros((len(skel),3))