start = time.time()
sim.szdyn(tmax=tmax,
          sample_time=0.1 * doubling_time,
          nameCRM="./data/dataCRM.csv")  #Simulating the size for all the cells
print('It took', int(time.time() - start), 'seconds.')  #np.int was removed from NumPy; use the builtin int
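#The CSV written above is assumed to contain a 'time' column plus one column
#per simulated cell; this is how it is read back further below.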

start = time.time()
sim.szdynFSP(tmax=tmax,
             nameFSP="./data/dataFSP.csv")  #Obtaining trends using numerical FSP algorithm
print('It took', int(time.time() - start), 'seconds.')

sbar = np.linspace(0.5, 1.5, 100) * mean_size
cv2sz = []
deltsz = []
for i in sbar:
    sd, cv2 = sim.SdStat(i)  #Obtaining trends in sd vs Sb using master equation formulation
    cv2sz.append(cv2)
    deltsz.append(sd - i)
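#sim.SdStat(sb) is interpreted here, from how its output is used, as returning
#the predicted division statistics for a cell born at size sb: deltsz collects
#the predicted mean added size (sd - sb) and cv2sz the corresponding squared
#coefficient of variation over this range of birth sizes.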

data1 = pd.read_csv("./data/dataCRM.csv")
timearray = data1.time.unique()

mnszarray = []
cvszarray = []
errcv2sz = []
errmnsz = []
df = data1
del df['time']  #Keep only the per-cell size columns
for m in range(len(df)):
    szs = df.loc[m, :].values.tolist()  #All cell sizes at this sampling time
    mean_cntr, var_cntr, std_cntr = bayesest(szs, alpha=0.95)  #Bayesian estimate of mean and variance
    mnszarray.append(mean_cntr[0])  #Mean size
    errmnsz.append(mean_cntr[1][1] - mean_cntr[0])  #95% credible half-width of the mean
    cvszarray.append(var_cntr[0] / mean_cntr[0]**2)  #Squared coefficient of variation
    errv = (var_cntr[1][1] - var_cntr[0]) / mean_cntr[0]**2 + 2 * (
        mean_cntr[1][1] - mean_cntr[0]) * var_cntr[0] / mean_cntr[0]**3
    errcv2sz.append(errv)  #Propagated uncertainty of the CV2
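#Sketch (not in the original script): one way to visualize the trends computed
#above, assuming matplotlib.pyplot is already imported as plt (it is used below)
#and that data1 has one row per sampling time, so timearray and mnszarray align.
figtr, axtr = plt.subplots(figsize=(6, 4))
axtr.plot(timearray / doubling_time, mnszarray, label="mean size (simulation)")
axtr.fill_between(timearray / doubling_time,
                  np.array(mnszarray) - np.array(errmnsz),
                  np.array(mnszarray) + np.array(errmnsz),
                  alpha=0.3, label="95% credible interval")
axtr.set_xlabel("time (doubling times)")
axtr.set_ylabel("cell size")
axtr.legend()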
#Reconstructed block: its opening (part of a second example) was lost in extraction.
#Assumed: division data was simulated beforehand and saved to ./data/dataDSM.csv.
data2 = pd.read_csv("./data/dataDSM.csv")
data2 = data2[data2.time > 3 * doubling_time]  #Discard the initial transient
quantnumber = 5  #Number of birth-size bins (assumed)
CV2d, delt, sb, errcv2d, errdelt, errsb = [], [], [], [], [], []
for i in range(quantnumber):
    lperv0 = np.percentile(data2.S_b, i * 100 / quantnumber)
    hperv0 = np.percentile(data2.S_b, (i + 1) * 100 / quantnumber)
    quanta = data2[(data2.S_b >= lperv0) & (data2.S_b <= hperv0)]
    mean_cntr, var_cntr, std_cntr = bayesest(
        (quanta.S_d - quanta.S_b) / mean_size, alpha=0.95)  #Added size in the bin
    meanv0_cntr, varv0_cntr, stdv0_cntr = bayesest(
        quanta.S_b / mean_size, alpha=0.95)  #Birth size in the bin
    CV2d.append(var_cntr[0] / mean_cntr[0]**2)
    delt.append(mean_cntr[0])
    sb.append(meanv0_cntr[0])
    errv = (var_cntr[1][1] - var_cntr[0]) / mean_cntr[0]**2 + 2 * (
        mean_cntr[1][1] - mean_cntr[0]) * var_cntr[0] / mean_cntr[0]**3
    errcv2d.append(errv)
    errdelt.append(mean_cntr[1][1] - mean_cntr[0])
    errsb.append(meanv0_cntr[1][1] - meanv0_cntr[0])
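#After this loop each list has one entry per bin: sb holds the mean birth size
#and delt the mean added size (both in units of mean_size), while errsb, errdelt
#and errcv2d hold the corresponding uncertainties used as error bars below.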

start = time.time()
sbar = np.linspace(0.5, 1.5, 100) * mean_size
cv2sz = []
deltsz = []
for i in sbar:
    sd, cv2 = sim.SdStat(i)
    cv2sz.append(cv2)
    deltsz.append(sd - i)
print('It took', int(time.time() - start), 'seconds.')

mn = mean_size  #Plot sizes in units of the mean size (data2 was loaded and filtered above)
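#Left panel: division strategy from the simulation, i.e. added size (S_d - S_b)
#versus birth size S_b, both scaled by the mean size; the scatter shows
#individual division events and the error bars the binned Bayesian estimates.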
fig, ax = plt.subplots(1, 2, figsize=(12, 4))
ax[0].scatter(data2.S_b / mn, (data2.S_d - data2.S_b) / mn, s=2)

ax[0].errorbar(np.array(sb),
               np.array(delt),
               xerr=errsb,
               yerr=errdelt,
               fmt='o',