Code example #1
def debug(labeled_stack, image_stack, min_radius, max_radius, prop):
    # cluster the detected points, then collapse each cluster to one cell
    prop = clustering(prop, 6)
    log('info')("clustering over")
    # collapse each label group to a single row via the df_average helper
    cell_table = prop.groupby(level='label').apply(df_average, 'intensitysum')
    cell_table.to_pickle("cell_table.pkl")
    cell_table.to_csv("cell_show.csv")
    #cell_map = labeln(properties, labeled_stack)
    del cell_table['tag']
    return cell_table
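The df_average helper used above is not part of this listing. A minimal sketch of a compatible implementation, assuming it collapses each label group to one row by averaging the numeric columns weighted by the named column (the weighting scheme is a guess, not the project's actual code):

def df_average(group, weight_col):
    # collapse one label group to a single row: average every numeric
    # column, weighted by `weight_col` (e.g. 'intensitysum')
    weights = group[weight_col]
    numeric = group.select_dtypes(include='number')
    return numeric.multiply(weights, axis=0).sum() / weights.sum()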
Code example #2
File: test.py  Project: genialwang/lambda-image
def func1():
    log('info')('tiff load start...')
    rddA = tsc.loadImages('/home/wb/Microtube1-300-2-1k_12.tif', inputFormat='tif-stack')
    #rddA = tsc.loadImages('/home/wb/data/1-L/*.tif', inputFormat='tif-stack')
    #rddB = tsc.loadImages('/home/wb/data/1-R/*.tif', inputFormat='tif-stack')
    #print(rddA.collect())
    print(rddA.collectValuesAsArray())
    log('info')('tiff load over...')
    #return rddA, rddB
    return rddA
Code example #3
def clustering(prop, threshold):
    import scipy.cluster.hierarchy as hier
    log("info")("clustering start...")
    # cluster detections by their 3-D coordinates
    positions = prop[['x', 'y', 'z']].copy()
    print(positions.values.shape)
    log("info")("running fclusterdata ...")
    # flat clusters: points within `threshold` (cophenetic distance)
    # of each other share a cluster id
    cluster_idx = hier.fclusterdata(positions.values, threshold, criterion='distance')
    log("info")("fclusterdata done")
    # reindex the table by cluster id so downstream code can group on 'label'
    prop['new_label'] = cluster_idx
    prop.set_index('new_label', drop=True, append=False, inplace=True)
    prop.index.name = 'label'
    prop = prop.sort_index()
    return prop
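For context, SciPy's fclusterdata does the actual clustering here. A self-contained sketch of the same call on synthetic data (the points below are invented for illustration):

import numpy as np
import scipy.cluster.hierarchy as hier

# two well-separated blobs of 3-D points
pts = np.vstack([np.random.randn(20, 3), np.random.randn(20, 3) + 50])
# criterion='distance' forms flat clusters in which points whose
# cophenetic distance exceeds the threshold get different ids
labels = hier.fclusterdata(pts, 6, criterion='distance')
print(labels)  # one cluster id per point; here two distinct ids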
Code example #4
# Date     : 2015/07/25 16:14:09
# FileName : main.py
################################

from lambdaimage import preprocess as prep
from lambdaimage import registration as reg
from lambdaimage import fusion as fus
from pyspark import SparkContext, SparkConf
from lambdaimage import lambdaimageContext
from lambdaimage.utils.tool import exeTime, log, showsize
import numpy as np

#conf = SparkConf().setAppName('test').setMaster('local[1]').set('spark.executor.memory','2g').set('spark.driver.maxResultSize','6g').set('spark.driver.memory','8g').set('spark.local.dir','/dev/shm').set('spark.storage.memoryFraction','0.2').set('spark.default.parallelism','10')
#tsc=lambdaimageContext.start(conf=conf)
tsc = lambdaimageContext.start(master="spark://blade12:7077",appName="lambdaimage")
log('info')('tiff load start...')
rddA = tsc.loadImages('/home/wb/data/1-L/*.tif', inputFormat='tif-stack')
rddB = tsc.loadImages('/home/wb/data/1-R/*.tif', inputFormat='tif-stack')
log('info')('tiff load over...')

log('info')('intensity normalization start ...')
rddA = prep.intensity_normalization(rddA)
rddB = prep.intensity_normalization(rddB)
rddB = prep.flip(rddB)

_rddA = prep.intensity_normalization(rddA, 8)
_rddB = prep.intensity_normalization(rddB, 8)
log('info')('intensity normalization over ...')

log('info')('registration start ...')
vec0 = [0, 0, 0, 1, 1, 0, 0]  # initial parameter vector for the registration search
Code example #5
File: main.py  Project: genialwang/lambda-image
from lambdaimage import fusion as fus
from lambdaimage import segmentation as seg
from lambdaimage import lambdaimageContext
from lambdaimage.utils.tool import exeTime, log
from pyspark import SparkContext, SparkConf
from parseXML import load_xml_file, get_function
import numpy as np
import time

conf = (
    SparkConf()
    .setAppName('test')
    .setMaster('local[1]')
    .set('spark.executor.memory', '2g')
    .set('spark.driver.maxResultSize', '6g')
    .set('spark.driver.memory', '8g')
    .set('spark.local.dir', '/dev/shm')
    .set('spark.storage.memoryFraction', '0.2')
    .set('spark.default.parallelism', '10')
)
tsc = lambdaimageContext.start(conf=conf)

result = load_xml_file("./lambdaimage.xml")
count = 0

log('info')('load tiff ...')
rddA = tsc.loadImages('/home/wb/data/1-L/*.tif', inputFormat='tif-stack')
rddB = tsc.loadImages('/home/wb/data/1-R/*.tif', inputFormat='tif-stack')

log('info')('preprocess ...')
# look up the next pipeline step (function name and parameters) in the
# parsed XML config, then apply it by name via eval
fun, para = get_function(count, result)
print(fun)
_rddA = eval(fun)(rddA, int(para[0]))
_rddB = eval(fun)(rddB, int(para[0]))
count += 1
fun, para = get_function(count, result)
print(fun)
_rddB = eval(fun)(_rddB)
rddB = eval(fun)(rddB)
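load_xml_file and get_function live in the project-local parseXML module, which is not shown in this listing. One way such a lookup could be written with the standard library, assuming the XML describes one element per pipeline step with a function name and a comma-separated parameter list (the element and attribute names are assumptions):

import xml.etree.ElementTree as ET

def load_xml_file(path):
    # parse the pipeline description once; return the list of step elements
    return ET.parse(path).getroot().findall('step')

def get_function(count, result):
    # return the function name and parameter list of the count-th step,
    # e.g. ('prep.intensity_normalization', ['8'])
    step = result[count]
    return step.get('name'), step.get('params', '').split(',')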
Code example #6
File: mehi_local.py  Project: genialwang/lambda-image
conf = (
    SparkConf()
    .setAppName("test")
    .setMaster("local[1]")
    .set("spark.executor.memory", "2g")
    .set("spark.driver.maxResultSize", "6g")
    .set("spark.driver.memory", "8g")
    .set("spark.local.dir", "/dev/shm")
    .set("spark.storage.memoryFraction", "0.2")
    .set("spark.default.parallelism", "10")
)
tsc = lambdaimageContext.start(conf=conf)

result = load_xml_file("./lambdaimage.xml")

log("info")("tiff load start...")
rddA = tsc.loadImages("/home/wb/data/1-L/*.tif", inputFormat="tif-stack")
rddB = tsc.loadImages("/home/wb/data/1-R/*.tif", inputFormat="tif-stack")
log("info")("tiff load over...")
log("info")("intensity normalization start ...")
rddA = prep.intensity_normalization(rddA)
rddB = prep.intensity_normalization(rddB)
rddB = prep.flip(rddB)

_rddA = prep.intensity_normalization(rddA, 8)
_rddB = prep.intensity_normalization(rddB, 8)
log("info")("intensity normalization over ...")

log("info")("registration start ...")
vec0 = [0, 0, 0, 1, 1, 0, 0]
# vec = reg.c_powell(_rddA.get(4), _rddB.get(4), vec0)
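The commented-out reg.c_powell call suggests the transform parameters are fitted with a Powell-style search. A rough sketch of that idea with SciPy, using a stand-in cost function rather than the project's actual registration metric:

import numpy as np
from scipy.optimize import minimize

def registration_cost(vec, fixed, moving):
    # stand-in cost: the real pipeline would warp `moving` by the
    # transform encoded in `vec` and compare it against `fixed`
    return np.sum((fixed - (moving - vec[0])) ** 2)

fixed = np.random.rand(64, 64)
moving = fixed + 0.5  # shifted copy to register back onto `fixed`
res = minimize(registration_cost, x0=[0, 0, 0, 1, 1, 0, 0],
               args=(fixed, moving), method='Powell')
print(res.x)  # fitted parameter vector (vec[0] should approach 0.5)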