def update_all_parameter(diff):
    """Simulate Luc-only vs. mixed (Luc+HCC) tumor growth for one `diff`.

    Builds Barabasi-Albert graphs for the two cell lines, grows them for
    `time` steps via graph entropy, and returns the element-wise ratio of
    the pure-Luc volume series to the mixed-population volume series.

    Parameters
    ----------
    diff : float
        Scaling factor applied to the Luc node count and growth rate.

    Returns
    -------
    numpy.ndarray
        sim_ratio, length-`time` array of LucN[t] / MixN[t].
    """
    # print 'each difference - %s' % diff
    luc_node = int(30 * diff)
    hcc_node = int(5)
    time = 10
    # parameter
    luc_gro = int(6 * diff)
    hcc_gro = int(2)
    lucG = nx.barabasi_albert_graph(luc_node, luc_gro)
    hccG = nx.barabasi_albert_graph(hcc_node, hcc_gro)
    # 90/10 Luc/HCC mixing frequencies for the combined graph.
    frequency = np.array([0.9, 0.1])
    G_combine = nx.Graph()
    G_combine = graph.merge_graph(G_combine, hccG, lucG, frequency)
    # NOTE(review): the 50/50 combined graph is built but never read below —
    # possibly kept for merge_graph side effects; confirm before removing.
    frequency_1 = np.array([0.5, 0.5])
    G_combine_1 = nx.Graph()
    G_combine_1 = graph.merge_graph(G_combine_1, hccG, lucG, frequency_1)
    # Time series cell volume
    LucN = []
    hccN = []
    # Number of initial cell
    LucN0 = 100
    hccN0 = 100
    LucN_init = 100
    hccN_init = 100
    for t in range(time):
        LucN.append(calc.convert_volume(LucN0))
        lucG = graph.update_graph(lucG, luc_gro)
        LucN0 = LucN_init * calc.calc_entropy(lucG, t + 1)
    for t in range(time):
        hccN.append(calc.convert_volume(hccN0))
        hccG = graph.update_graph(hccG, hcc_gro)
        hccN0 = hccN_init * calc.calc_entropy(hccG, t + 1)
    # Mix Number of cell
    MixN0 = 100
    MixN_init = 100
    initial_populations = MixN0 * frequency
    # Combined growth rate: frequency-weighted mean of the two rates, halved.
    G_comb_gro = ((frequency * np.array([luc_gro, hcc_gro])).sum()) / 2
    MixN = []
    x = []
    for t in range(time):
        x.append(t)
        MixN.append(calc.convert_volume(MixN0))
        G_combine = graph.update_graph(G_combine, G_comb_gro)
        MixN0 = MixN_init * calc.calc_entropy(G_combine, t + 1)
    # NOTE(review): a stray unterminated `"""` followed `return sim_ratio`
    # in the mangled original (SyntaxError); removed here.
    sim_ratio = np.array(LucN) / np.array(MixN)
    return sim_ratio
def update_all_parameter(diff):
    """Simulate Luc-only vs. mixed (Luc+HCC) tumor growth for one `diff`.

    Builds Barabasi-Albert graphs for the two cell lines, grows them for
    `time` steps via graph entropy, and returns the element-wise ratio of
    the pure-Luc volume series to the mixed-population volume series.

    Parameters
    ----------
    diff : float
        Scaling factor applied to the Luc node count and growth rate.

    Returns
    -------
    numpy.ndarray
        sim_ratio, length-`time` array of LucN[t] / MixN[t].
    """
    # print 'each difference - %s' % diff
    luc_node = int(30 * diff)
    hcc_node = int(5)
    time = 10
    # parameter
    luc_gro = int(6 * diff)
    hcc_gro = int(2)
    lucG = nx.barabasi_albert_graph(luc_node, luc_gro)
    hccG = nx.barabasi_albert_graph(hcc_node, hcc_gro)
    # 90/10 Luc/HCC mixing frequencies for the combined graph.
    frequency = np.array([0.9, 0.1])
    G_combine = nx.Graph()
    G_combine = graph.merge_graph(G_combine, hccG, lucG, frequency)
    # NOTE(review): the 50/50 combined graph is built but never read below —
    # possibly kept for merge_graph side effects; confirm before removing.
    frequency_1 = np.array([0.5, 0.5])
    G_combine_1 = nx.Graph()
    G_combine_1 = graph.merge_graph(G_combine_1, hccG, lucG, frequency_1)
    # Time series cell volume
    LucN = []
    hccN = []
    # Number of initial cell
    LucN0 = 100
    hccN0 = 100
    LucN_init = 100
    hccN_init = 100
    for t in range(time):
        LucN.append(calc.convert_volume(LucN0))
        lucG = graph.update_graph(lucG, luc_gro)
        LucN0 = LucN_init * calc.calc_entropy(lucG, t + 1)
    for t in range(time):
        hccN.append(calc.convert_volume(hccN0))
        hccG = graph.update_graph(hccG, hcc_gro)
        hccN0 = hccN_init * calc.calc_entropy(hccG, t + 1)
    # Mix Number of cell
    MixN0 = 100
    MixN_init = 100
    initial_populations = MixN0 * frequency
    # Combined growth rate: frequency-weighted mean of the two rates, halved.
    G_comb_gro = ((frequency * np.array([luc_gro, hcc_gro])).sum()) / 2
    MixN = []
    x = []
    for t in range(time):
        x.append(t)
        MixN.append(calc.convert_volume(MixN0))
        G_combine = graph.update_graph(G_combine, G_comb_gro)
        MixN0 = MixN_init * calc.calc_entropy(G_combine, t + 1)
    # NOTE(review): a stray unterminated `"""` followed `return sim_ratio`
    # in the mangled original (SyntaxError); removed here.
    sim_ratio = np.array(LucN) / np.array(MixN)
    return sim_ratio
def num_read_cells(mm2):
    """Load the experimental cell-count CSVs and scale them to cell numbers.

    Parameters
    ----------
    mm2 : float
        Measured area in mm^2; converted via calc.convert_volume and
        scaled by 10**3.

    Returns
    -------
    tuple of numpy.ndarray
        (luc, hcc, mix) arrays, each offset by +1 and multiplied by the
        volume factor.
    """
    scale = calc.convert_volume(mm2) * 10**3
    # +1 offsets every reading before scaling.
    luc = 1 + np.loadtxt('data/100-0_r.csv', delimiter=",")
    hcc = 1 + np.loadtxt('data/0-100_r.csv', delimiter=",")
    # The mixed-population file carries a header row, hence skiprows=1.
    mix = 1 + np.loadtxt('data/10-90_r.csv', delimiter=",", skiprows=1)
    return scale * luc, scale * hcc, scale * mix
def num_read_cells(mm2):
    """Read the three experimental count series and scale each to cells.

    Parameters
    ----------
    mm2 : float
        Measured area in mm^2, converted through calc.convert_volume and
        multiplied by 10**3.

    Returns
    -------
    tuple of numpy.ndarray
        Scaled (luc, hcc, mix) arrays; every raw value is offset by +1.
    """
    factor = calc.convert_volume(mm2) * 10**3
    datasets = []
    # The mixed file ('data/10-90_r.csv') has a header row to skip.
    for path, skip in (('data/100-0_r.csv', 0),
                       ('data/0-100_r.csv', 0),
                       ('data/10-90_r.csv', 1)):
        raw = np.loadtxt(path, delimiter=",", skiprows=skip)
        datasets.append((raw + 1) * factor)
    return datasets[0], datasets[1], datasets[2]
def exp_test():
    """Correlate a simulated exponential Luc growth curve with experiment.

    Simulates `time` steps of exponential growth (rate 0.1 per step),
    resamples the simulated series onto the experimental time points, and
    prints the mean Pearson correlation across experimental replicates.

    Returns
    -------
    int
        Always 0.
    """
    # parameter
    luc_node = 100
    time = 10
    luc_gro = 10
    # Generate Graph (updated each step; its state is not read by the
    # exponential model below)
    lucG = nx.barabasi_albert_graph(luc_node, luc_gro)
    # Time series cell volume
    LucN = []
    # Number of initial cell
    LucN0 = 100
    LucN_init = 100
    mm2 = 43
    for t in range(time):
        LucN.append(calc.convert_volume(LucN0))
        lucG = graph.update_graph(lucG, luc_gro)
        # FIX: original `1/10*(t+1)` is integer division under Python 2,
        # evaluating to 0 and freezing LucN0 at LucN_init; 0.1 is the
        # intended per-step growth rate.
        LucN0 = LucN_init * math.exp(0.1 * (t + 1))
    r_LucN, r_hccN, r_MixN = graph.num_read_cells(mm2)
    time_point = len(r_LucN[0])  # 8
    # FIX: float() forces true division — the author's own comment expects
    # 1.25, but Python 2 integer division gave 10/8 == 1.
    sim_tmp = float(len(LucN)) / time_point  # 1.25
    LucN_p = []
    for t in range(time_point):
        # Resample the simulated curve onto the experimental time grid.
        LucN_p.append(LucN[int(round(t * sim_tmp))])
    corr_Luc = []
    for i in range(len(r_LucN)):  # times of experiments
        tmp_Luc = np.corrcoef(r_LucN[i], LucN_p)
        corr_Luc.append(tmp_Luc[0, 1])
    # print() form is valid on both Python 2 and 3 for a single argument.
    print(np.average(np.array(corr_Luc)))
    return 0
def exp_test():
    """Correlate a simulated exponential Luc growth curve with experiment.

    Simulates `time` steps of exponential growth (rate 0.1 per step),
    resamples the simulated series onto the experimental time points, and
    prints the mean Pearson correlation across experimental replicates.

    Returns
    -------
    int
        Always 0.
    """
    # parameter
    luc_node = 100
    time = 10
    luc_gro = 10
    # Generate Graph (updated each step; its state is not read by the
    # exponential model below)
    lucG = nx.barabasi_albert_graph(luc_node, luc_gro)
    # Time series cell volume
    LucN = []
    # Number of initial cell
    LucN0 = 100
    LucN_init = 100
    mm2 = 43
    for t in range(time):
        LucN.append(calc.convert_volume(LucN0))
        lucG = graph.update_graph(lucG, luc_gro)
        # FIX: original `1/10*(t+1)` is integer division under Python 2,
        # evaluating to 0 and freezing LucN0 at LucN_init; 0.1 is the
        # intended per-step growth rate.
        LucN0 = LucN_init * math.exp(0.1 * (t + 1))
    r_LucN, r_hccN, r_MixN = graph.num_read_cells(mm2)
    time_point = len(r_LucN[0])  # 8
    # FIX: float() forces true division — the author's own comment expects
    # 1.25, but Python 2 integer division gave 10/8 == 1.
    sim_tmp = float(len(LucN)) / time_point  # 1.25
    LucN_p = []
    for t in range(time_point):
        # Resample the simulated curve onto the experimental time grid.
        LucN_p.append(LucN[int(round(t * sim_tmp))])
    corr_Luc = []
    for i in range(len(r_LucN)):  # times of experiments
        tmp_Luc = np.corrcoef(r_LucN[i], LucN_p)
        corr_Luc.append(tmp_Luc[0, 1])
    # print() form is valid on both Python 2 and 3 for a single argument.
    print(np.average(np.array(corr_Luc)))
    return 0
# NOTE(review): orphaned fragment — the interior of a simulation routine whose
# `def` header (and the definitions of time, frequency, lucG, hccG, luc_gro,
# hcc_gro) lies outside this view. It mirrors update_all_parameter above but
# with initial populations of 10**4 instead of 100, and is cut off mid-body.
# Left byte-identical; it cannot be safely reconstructed from here.
G_combine_1 =nx.Graph() G_combine_1 = graph.merge_graph(G_combine_1, hccG, lucG, frequency_1) #Time series cell volume LucN = [] hccN = [] #Number of initial cell LucN0 = 10**4 hccN0 = 10**4 LucN_init = 10**4 hccN_init = 10**4 for t in range(time): LucN.append(calc.convert_volume(LucN0)) lucG = graph.update_graph(lucG, luc_gro) LucN0 = LucN_init*calc.calc_entropy(lucG, t+1) for t in range(time): hccN.append(calc.convert_volume(hccN0)) hccG = graph.update_graph(hccG, hcc_gro) hccN0 = hccN_init*calc.calc_entropy(hccG, t+1) #Mix Number of cell MixN0 = 10**4 MixN_init = 10**4 initial_populations = MixN0*frequency G_comb_gro = ((frequency*np.array([luc_gro, hcc_gro])).sum())/2 MixN = [] x = []
def calculate_mm3(cells, mm2):
    """Convert a raw cell count into a volume expressed in mm^3.

    Parameters
    ----------
    cells : float or numpy.ndarray
        Cell count(s) to convert.
    mm2 : float
        Measured area in mm^2, fed to calc.convert_volume.

    Returns
    -------
    float or numpy.ndarray
        cells divided by the cells-per-mm^3 density.
    """
    # FIX: original read `10*3` (== 30); `10**3` (== 1000) matches the
    # scaling used in num_read_cells and is almost certainly the intent.
    cells_per_mm3 = calc.convert_volume(mm2) * 10**3
    return cells / cells_per_mm3
def calculate_mm3(cells, mm2):
    """Convert a raw cell count into a volume expressed in mm^3.

    Parameters
    ----------
    cells : float or numpy.ndarray
        Cell count(s) to convert.
    mm2 : float
        Measured area in mm^2, fed to calc.convert_volume.

    Returns
    -------
    float or numpy.ndarray
        cells divided by the cells-per-mm^3 density.
    """
    # FIX: original read `10 * 3` (== 30); `10**3` (== 1000) matches the
    # scaling used in num_read_cells and is almost certainly the intent.
    cells_per_mm3 = calc.convert_volume(mm2) * 10**3
    return cells / cells_per_mm3