########################
    # Build the reco testing here 
    ########################
    # NOTE(review): fragment of a larger function -- `dataset`, `pc`, `lh`,
    # `dh`, `make_jsons`, `jdir`, `jcount` and `mc_dl` are all defined outside
    # this view.  Python 2 syntax (print statements; xrange used below).
    labels = []
    

    # Cluster the spacepoints via a KD-tree radius search; one label per point.
    labels = pc.kdtree_radius(dataset)
    datasetidx_holder = lh.label_to_idxholder(labels,20) # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]  

    print 'out of the tree for good'
    # Make a json to view
    if make_jsons:
        dh.MakeJson_Objects(dataset,datasetidx_holder,labels,jdir,jcount,'kd_tree', mc_dl)

    print ' AT THE END'




    # Dead code kept for reference: an earlier strategy that chose walker vs
    # crawlernn clustering based on dataset size.
    #if len(dataset)>=15000 or len(dataset)==0:
        #labels = pc.walker(dataset,6,20) # Runs clustering and returns labels list 
    #else:
        #labels = pc.crawlernn(dataset, 6, 20 ) # Runs clustering and returns labels list 


    #datasetidx_holder = lh.label_to_idxholder(labels,20) # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]  
# ===== Exemple #2 =====
# (snippet-scraper artifact: the stray "0" below the heading was a vote count, not code)
    #######
    # NOTE(review): fragment of a larger function -- `dataset`, `nn_dist`,
    # `mincluster`, `pc`, `lh`, `dh`, `make_jsons`, `jdir`, `jcount` and
    # `mc_dl` come from the enclosing (unseen) scope; the sweep body is cut
    # off below, so the loop variables feed code outside this view.
    wlabels = []
    #trackidx_holder = []
    #showeridx_holder = []

    #wlabels = pc.walker(dataset,8,20) # Runs clustering and returns labels list
    wlabels = pc.walker(dataset, nn_dist,
                        mincluster)  # Runs clustering and returns labels list

    wdatasetidx_holder = lh.label_to_idxholder(
        wlabels, mincluster
    )  # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]

    if make_jsons:
        dh.MakeJson_Objects(dataset, wdatasetidx_holder, wlabels, jdir, jcount,
                            'Walker', mc_dl)

#minclusterlength = 10

    # Parameter sweep over the track-stitcher knobs.  The active ranges are
    # deliberately narrow (e.g. xrange(120, 121, 30) is just [120]); the
    # commented-out ranges show the wider sweeps that were explored.
    for gap_dist in xrange(120, 121, 30):
        #for gap_dist in xrange(30,150,30):
        for k_radius in xrange(8, 13, 2):
            #for k_radius in xrange(10,40,10):
            for angle_error in xrange(8, 9, 8):
                #for angle_error in xrange(8,25,8):
                for min_pdelta in xrange(2, 6, 3):
                    #for min_pdelta in xrange(2,5,3):
                    # Make a fresh labels and dataholder
                    # Shallow copies so each sweep iteration can relabel
                    # without clobbering the walker output in wlabels.
                    labels = [x for x in wlabels]
                    datasetidx_holder = [x for x in wdatasetidx_holder]
# ===== Exemple #3 =====
# (snippet-scraper artifact: the stray "0" below the heading was a vote count, not code)
    # NOTE(review): fragment of a larger function -- `f`, `dataset`,
    # `nn_dist`, `mincluster`, `gap_dist`, `k_radius`, `min_pdelta`, `AE`,
    # `min_clust_length`, `make_jsons`, `jdir`, `jcount`, `mc_dl` and the
    # pc/lh/dh/st/tss helper modules are all defined outside this view.
    if make_jsons:
        dh.MakeJsonReco(f, jdir, jcount, 'AlgSPT', mc_dl)

    #######
    labels = []

    # Pass 1: walker clustering -> index holder -> optional 'Walker' json.
    labels = pc.walker(dataset, nn_dist,
                       mincluster)  # Runs clustering and returns labels list

    datasetidx_holder = lh.label_to_idxholder(
        labels, mincluster
    )  # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]

    if make_jsons:
        dh.MakeJson_Objects(dataset, datasetidx_holder, labels, jdir, jcount,
                            'Walker', mc_dl)

    # Stitch broken track segments back together using cluster endpoints.
    d, labels = st.Track_Stitcher_epts(dataset, datasetidx_holder, labels,
                                       gap_dist, k_radius, min_pdelta, AE,
                                       min_clust_length)
    datasetidx_holder = lh.label_to_idxholder(
        labels, mincluster
    )  # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]

    #if make_jsons:
    #    dh.MakeJson_Objects(dataset,datasetidx_holder,labels,jdir,jcount,'Stitcher', mc_dl)

    # Now run T_S and see how things look... do we need a sweep... and if so what do we sweep on
    # Split clusters into shower-like vs track-like.  The hard-coded cuts
    # (0.998, 20, 10) presumably encode linearity/length thresholds -- TODO
    # confirm against tss.cluster_first_length.
    showeridx_holder, trackidx_holder = tss.cluster_first_length(
        dataset, datasetidx_holder, 0.998, 20, 10)
    #######
    # Pass 2: the same pipeline re-run from scratch; this time the post-stitch
    # 'Stitcher' json is also written and results land in f-prefixed names.
    labels = []
    #trackidx_holder = []
    #showeridx_holder = []

    #wlabels = pc.walker(dataset,8,20) # Runs clustering and returns labels list
    labels = pc.walker(dataset, nn_dist,
                       mincluster)  # Runs clustering and returns labels list

    datasetidx_holder = lh.label_to_idxholder(
        labels, mincluster
    )  # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]

    if make_jsons:
        dh.MakeJson_Objects(dataset, datasetidx_holder, labels, jdir, jcount,
                            'Walker', mc_dl)

    d, labels = st.Track_Stitcher_epts(dataset, datasetidx_holder, labels,
                                       gap_dist, k_radius, min_pdelta, AE,
                                       min_clust_length)
    datasetidx_holder = lh.label_to_idxholder(
        labels, mincluster
    )  # Converts the labels list into a list of indexvalues for datasets  [ [ list of index], [list of indexes].. [] ]

    if make_jsons:
        dh.MakeJson_Objects(dataset, datasetidx_holder, labels, jdir, jcount,
                            'Stitcher', mc_dl)

    # Now run T_S and see how things look... do we need a sweep... and if so what do we sweep on
    fshoweridx_holder, ftrackidx_holder = tss.cluster_first_length(
        dataset, datasetidx_holder, 0.998, 20, 10)
# ===== Exemple #5 =====
# (snippet-scraper artifact: the stray "0" below the heading was a vote count, not code)
    ###########    Rebase the dataset for reclustering    ###############
    #####################################################################
    #####################################################################
    # NOTE(review): fragment of a larger function -- `f`, `dataset`,
    # `showeridx_holder`, `ROI_list`, `start_Rtime`, `make_jsons`, `jdir`,
    # `jcount`, `mc_dl`, `dh` and the Erc/Er modules come from the enclosing
    # (unseen) scope.

    # Build a new spacepoint set restricted to the regions of interest.
    rebase_dataset = Erc.rebase_spts(f, dataset, showeridx_holder, ROI_list, 6,
                                     1000.)
    # Placeholder labelling: every rebased point gets label 5 and all indices
    # sit in a single holder cluster, so the ROI can be drawn as one object.
    rebase_labels = [5 for x in range(len(rebase_dataset))]
    rebaseidx_holder = [[x for x in range(len(rebase_dataset))]]

    # Timing checkpoint (Python 2 prints).  NOTE(review): timedelta.seconds is
    # only the seconds field (days are ignored) -- fine for short steps but
    # misleading for long runs; total_seconds() would be the safe choice.
    end_Rtime = datetime.now()
    delta_Rt = end_Rtime - start_Rtime
    print 'RTIME', str(delta_Rt.seconds) + ' rebase_dataset'
    start_Rtime = datetime.now()

    if make_jsons:
        dh.MakeJson_Objects(rebase_dataset, rebaseidx_holder, rebase_labels,
                            jdir, jcount, 'REBASE_ROI', mc_dl)

    end_Rtime = datetime.now()
    delta_Rt = end_Rtime - start_Rtime
    print 'RTIME', str(delta_Rt.seconds) + ' make_rebasejson'
    start_Rtime = datetime.now()

    # Full reconstruction pass on the rebased (ROI-restricted) dataset.
    rtrackidx_holder, rshoweridx_holder, rlabels = Er.rebase_Full_reco(
        rebase_dataset, mc_dl, jdir, jcount)
    #rtrackidx_holder , rshoweridx_holder , rlabels =Er.rebase_Full_reco(rebase_dataset,mc_dl,jdir,jcount,make_jsons,timer=True)
    end_Rtime = datetime.now()
    delta_Rt = end_Rtime - start_Rtime
    print 'RTIME ', str(delta_Rt.seconds) + ' run_rebase_Full_reco'
    start_Rtime = datetime.now()
    #print rebase_dataset