# Let's start working with the objects as keys:
# NOTE(review): this chunk was collapsed onto a single physical line, so the
# leading '#' turned every statement below into dead comment text; re-flowed
# here into real statements.  All helpers (link, sortfile, lsh, bic, pp,
# timer) are project-local -- presumably link() streams one data file through
# a mapper into an output file; confirm against their definitions.

# generate lsh keys and sort by the generated hash key
link( agdataO, lsh.genlsh, lshdata, (lshparams, probabilityO, thr,['']) )
sortfile(lshdata,slshdata)
timer.send('LSH')

# put lsh data into buckets
link( slshdata, lsh.reducelsh, candataU )
sortfile( candataU, candata )
pp.mergecand( candata )
timer.send('Reduce LSH')
#candstats(dataset , bic.getinfo(agdataO))

# generate biclusters
link( candata, bic.genbic, bicdata, (bic.getinfo(agdataO), probabilityO, thr, (min_rows,min_cols), sparse, False) )
timer.send('Gen. Bicluster')
#ic.InClose2( dataset, bic.getinfo(agdataO), min_rows, min_cols )
#timer.send('Gen. InClose2')

# Now do the same with the features as keys:
# NOTE(review): this section was disabled in the original with an opening
# triple-quoted string whose closing quotes lie outside this chunk; it is
# kept disabled here with '#' comments so the chunk stays syntactically
# complete on its own.
## generate lsh keys and sort by the generated hash key
#link( agdataF, lsh.genlsh, lshdata, (lshparams, probabilityF, thr,['']) )
#sortfile(lshdata,slshdata)
#timer.send('LSH')
## put lsh data into buckets
#link( slshdata, lsh.reducelsh, candataU )
#sortfile( candataU, candata )
# NOTE(review): re-flowed from a single collapsed line where a leading '#'
# had commented-out the whole pipeline.  Helpers are project-local; the two
# passes below are identical except for the key side (objects vs features)
# -- a candidate for extraction into a shared helper, left in place here.

# generate lsh keys and sort by the generated hash key
link(agdataO, lsh.genlsh, lshdata, (lshparams, probabilityO, thr, ['']))
sortfile(lshdata, slshdata)
timer.send('LSH')

# put lsh data into buckets
link(slshdata, lsh.reducelsh, candataU)
sortfile(candataU, candata)
pp.mergecand(candata)
timer.send('Reduce LSH')
#candstats(dataset , bic.getinfo(agdataO))

# generate biclusters
link(candata, bic.genbic, bicdata, (bic.getinfo(agdataO), probabilityO, thr, (min_rows, min_cols), sparse, False))
timer.send('Gen. Bicluster')

# Now do the same with the features as keys:
# generate lsh keys and sort by the generated hash key
link(agdataF, lsh.genlsh, lshdata, (lshparams, probabilityF, thr, ['']))
sortfile(lshdata, slshdata)
timer.send('LSH')

# put lsh data into buckets
link(slshdata, lsh.reducelsh, candataU)
sortfile(candataU, candata)
pp.mergecand(candata)
timer.send('Reduce LSH')
# Let's start working with the objects as keys:
# NOTE(review): re-flowed from a single collapsed line where a leading '#'
# had commented-out the whole pipeline.  All helpers are project-local;
# presumably link() applies the given mapper to the input file and writes
# the output file -- confirm against its definition.

# generate lsh keys and sort by the generated hash key
link( agdataO, lsh.genlsh, lshdata, (lshparams, probabilityO, thr,['']) )
sortfile(lshdata,slshdata)
timer.send('LSH')

# put lsh data into buckets
link( slshdata, lsh.reducelsh, candataU )
sortfile( candataU, candata )
pp.mergecand( candata )
timer.send('Reduce LSH')
#candstats(dataset , bic.getinfo(agdataO))

# generate biclusters
link( candata, bic.genbic, bicdata, (bic.getinfo(agdataO), probabilityO, thr, (min_rows,min_cols), sparse, False) )
timer.send('Gen. Bicluster')

# Now do the same with the features as keys:
# generate lsh keys and sort by the generated hash key
link( agdataF, lsh.genlsh, lshdata, (lshparams, probabilityF, thr,['']) )
sortfile(lshdata,slshdata)
timer.send('LSH')

# put lsh data into buckets
link( slshdata, lsh.reducelsh, candataU )
sortfile( candataU, candata )
pp.mergecand( candata )
timer.send('Reduce LSH')
#candstats(dataset , bic.getinfo(agdataO))
# Let's start working with the objects as keys:
# NOTE(review): re-flowed from a single collapsed line where a leading '#'
# had commented-out the whole pipeline, including the __main__ guard at the
# end.  Helpers are project-local -- semantics presumed from their names;
# confirm against their definitions.

# generate lsh keys and sort by the generated hash key
link( agdataO, lsh.genlsh, lshdata, (lshparams, probabilityO, thr,['']) )
sortfile(lshdata,slshdata)
timer.send('LSH')

# put lsh data into buckets
link( slshdata, lsh.reducelsh, candataU )
sortfile( candataU, candata )
pp.mergecand( candata )
timer.send('Reduce LSH')
#candstats(dataset , bic.getinfo(agdataO))

# generate biclusters
link( candata, bic.genbic, bicdata, (bic.getinfo(agdataO), probabilityO, thr, (min_rows,min_cols), sparse, False) )
timer.send('Gen. Bicluster')
timer.close()

# post-processing over the full result set
#pp.filterbics(bicdata)
pp.merge(dataset)
#pp.hierclust(dataset,7)
pp.uncovered(dataset, bic.getinfo(agdataO))

if __name__ == "__main__":
    # parenthesized so the statement is valid under both Python 2 and 3;
    # for this single string argument the output is identical to the
    # original Python-2 statement  print 'Crime '
    print('Crime ')
# NOTE(review): re-flowed from a single collapsed line where a leading '#'
# had commented-out the whole pipeline.  Helpers are project-local; this
# variant additionally runs the ic.InClose2 enumeration that other variants
# keep commented out.

# generate lsh keys and sort by the generated hash key
link(agdataO, lsh.genlsh, lshdata, (lshparams, probabilityO, thr, ['']))
sortfile(lshdata, slshdata)
timer.send('LSH')

# put lsh data into buckets
link(slshdata, lsh.reducelsh, candataU)
sortfile(candataU, candata)
pp.mergecand(candata)
timer.send('Reduce LSH')
#candstats(dataset , bic.getinfo(agdataO))

# generate biclusters
link(candata, bic.genbic, bicdata, (bic.getinfo(agdataO), probabilityO, thr, (min_rows, min_cols), sparse, False))
timer.send('Gen. Bicluster')

# exhaustive bicluster enumeration -- presumably the InClose2 formal-concept
# algorithm used as a cross-check on the LSH candidates; confirm against the
# ic module
ic.InClose2(dataset, bic.getinfo(agdataO), min_rows, min_cols)
timer.send('Gen. InClose2')

# Now do the same with the features as keys:
# generate lsh keys and sort by the generated hash key
link(agdataF, lsh.genlsh, lshdata, (lshparams, probabilityF, thr, ['']))
sortfile(lshdata, slshdata)
timer.send('LSH')

# put lsh data into buckets
link(slshdata, lsh.reducelsh, candataU)