Example #1
import os

import seaborn as sns

# NOTE: the ifnclass.ifndata module path for IfnData and DataAlignment is
# assumed from the ifnclass package layout; TimecoursePlot is imported from
# ifnclass.ifnplot as in the later examples
from ifnclass.ifndata import IfnData, DataAlignment
from ifnclass.ifnplot import TimecoursePlot


def timecourse_figure():
    # Get experimental data
    newdata_1 = IfnData("20190108_pSTAT1_IFN_Bcell")
    newdata_2 = IfnData("20190119_pSTAT1_IFN_Bcell")
    newdata_3 = IfnData("20190121_pSTAT1_IFN_Bcell")
    newdata_4 = IfnData("20190214_pSTAT1_IFN_Bcell")

    # Aligned data, to get scale factors for each data set
    alignment = DataAlignment()
    alignment.add_data([newdata_4, newdata_3, newdata_2, newdata_1])
    alignment.align()
    alignment.get_scaled_data()
    mean_data = alignment.summarize_data()

    # Plot
    green = sns.color_palette("deep")[2]
    red = sns.color_palette("deep")[3]
    light_green = sns.color_palette("pastel")[2]
    light_red = sns.color_palette("pastel")[3]
    plot = TimecoursePlot((1, 1))
    plot.add_trajectory(mean_data,
                        'errorbar',
                        'o--', (0, 0),
                        label=r'10 pM IFN$\alpha$2',
                        color=light_red,
                        dose_species='Alpha',
                        doseslice=10.0,
                        alpha=0.5)
    plot.add_trajectory(mean_data,
                        'errorbar',
                        'o--', (0, 0),
                        label=r'6 pM IFN$\beta$',
                        color=light_green,
                        dose_species='Beta',
                        doseslice=6.0,
                        alpha=0.5)
    plot.add_trajectory(mean_data,
                        'errorbar',
                        'o-', (0, 0),
                        label=r'3000 pM IFN$\alpha$2',
                        color=red,
                        dose_species='Alpha',
                        doseslice=3000.0)
    plot.add_trajectory(mean_data,
                        'errorbar',
                        'o-', (0, 0),
                        label=r'2000 pM IFN$\beta$',
                        color=green,
                        dose_species='Beta',
                        doseslice=2000.0)
    fname = os.path.join(os.getcwd(), 'results', 'Figures', 'Figure_4',
                         'Timecourse.pdf')
    plot.axes.set_ylabel('pSTAT1 (MFI)')
    plot.show_figure(show_flag=False, save_flag=True, save_dir=fname)
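
# --- Hedged usage sketch (not part of the original example) ------------------
# show_figure() saves into results/Figures/Figure_4 under the current working
# directory, so a caller would typically make sure that directory exists first;
# the makedirs call below is an assumption, not taken from the original script.
if __name__ == '__main__':
    os.makedirs(os.path.join(os.getcwd(), 'results', 'Figures', 'Figure_4'),
                exist_ok=True)
    timecourse_figure()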
Example #2
# NOTE: IfnData, DataAlignment and IfnModel are used below; their
# ifnclass.ifndata and ifnclass.ifnmodel module paths are assumed from the
# ifnclass package layout
from ifnclass.ifndata import IfnData, DataAlignment
from ifnclass.ifnmodel import IfnModel
from ifnclass.ifnplot import DoseresponsePlot, TimecoursePlot
from ifnclass.ifnfit import StepwiseFit
import seaborn as sns
import numpy as np
import pandas as pd

if __name__ == '__main__':
    # ------------------------------
    # Align all data
    # ------------------------------
    newdata_1 = IfnData("20190108_pSTAT1_IFN_Bcell")
    newdata_2 = IfnData("20190119_pSTAT1_IFN_Bcell")
    newdata_3 = IfnData("20190121_pSTAT1_IFN_Bcell")
    newdata_4 = IfnData("20190214_pSTAT1_IFN_Bcell")

    alignment = DataAlignment()
    alignment.add_data([newdata_4, newdata_3, newdata_2, newdata_1])
    alignment.align()
    alignment.get_scaled_data()
    mean_data = alignment.summarize_data()

    # -------------------------------
    # Initialize model
    # -------------------------------
    times = newdata_4.get_times('Alpha')
    doses_alpha = newdata_4.get_doses('Alpha')
    doses_beta = newdata_4.get_doses('Beta')
    Mixed_Model = IfnModel('Mixed_IFN_ppCompatible')
    Mixed_Model.set_parameters({
        'R2': 4920,
        'R1': 1200,
Example #3
    #  | 1E-8   6E-10                -----------> increasing
    # C| alpha  beta                |               time
    #  | 3E-9   2E-10               |
    # D| alpha  beta                V
    #  | 1E-9   6E-11            decreasing
    # E| alpha  beta             concentration
    #  | 3E-10  2E-11
    # F| alpha  beta
    #  | 1E-10  6E-12
    # G| alpha  beta
    #  | 1E-11  2E-13
    # H| alpha  beta
    #  | 0 pM   0 pM

    # Load saved DataAlignment
    small_alignment = DataAlignment()
    small_alignment.load_from_save_file(
        'small_alignment', os.path.join(os.getcwd(), 'small_alignment'))
    large_alignment = DataAlignment()
    large_alignment.load_from_save_file(
        'large_alignment', os.path.join(os.getcwd(), 'large_alignment'))
    small_alignment.align()
    small_alignment.get_scaled_data()
    mean_small_data = small_alignment.summarize_data()
    large_alignment.align()
    large_alignment.get_scaled_data()
    mean_large_data = large_alignment.summarize_data()

    # ----------------------
    # Set up Figure layout
    # ----------------------
Example #4
def _split_data(datalist, withhold):
    """Splits a list of IfnData instances into test and train subsets, placing
    <withold> percentage of the data in the test subset. The testing subset is
    then aligned using a DataAlignment instance, and the training subset is
    scaled according to the *testing* subset scale factors. The test and train
    aligned IfnData objects output by the DataAlignment.summarize_data() method
    are returned.
    """
    assert 0 <= withhold <= 100
    # Randomly choose which coordinates get blanked out (set to NaN) in each
    # copy: the test copies keep <withhold> percent of the points and the
    # train copies keep the remaining (100 - withhold) percent
    data_coord = _get_data_coordinates(datalist[0])
    test_size = int((100 - withhold) * len(data_coord) / 100.0)
    test_idcs = np.random.choice(len(data_coord), test_size, replace=False)
    test_coord = [data_coord[i] for i in test_idcs]
    train_coord = [c for c in data_coord if c not in test_coord]

    # Separate data into test and train subsets
    test_datalist = [d.copy() for d in datalist]
    train_datalist = [d.copy() for d in datalist]
    for obj in test_datalist:
        for c in test_coord:
            # a single .loc call ensures the NaN lands in the underlying
            # DataFrame rather than in a chained-indexing copy
            obj.data_set.loc[c[0:2], c[2]] = np.nan

    for obj in train_datalist:
        for c in train_coord:
            obj.data_set.loc[c[0:2], c[2]] = np.nan

    train_alignment = DataAlignment()
    train_alignment.add_data(train_datalist)
    train_alignment.align()
    train_alignment.get_scaled_data()
    train = train_alignment.summarize_data()

    if withhold == 0:
        test = None
    else:
        test_alignment = DataAlignment()
        test_alignment.add_data(test_datalist)
        test_alignment.scale_factors = train_alignment.scale_factors
        test_alignment.get_scaled_data()
        test = test_alignment.summarize_data()

    return train, test
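
# --- Hedged usage sketch (not part of the original module) -------------------
# Splitting the four B-cell data sets used in the other examples, withholding
# 20 percent of the points for testing. IfnData is assumed importable as in
# the other examples, and _get_data_coordinates() is assumed to be defined
# elsewhere in this module (it is referenced above).
if __name__ == '__main__':
    np.random.seed(0)  # make the random train/test split reproducible
    datalist = [IfnData("20190108_pSTAT1_IFN_Bcell"),
                IfnData("20190119_pSTAT1_IFN_Bcell"),
                IfnData("20190121_pSTAT1_IFN_Bcell"),
                IfnData("20190214_pSTAT1_IFN_Bcell")]
    train, test = _split_data(datalist, withhold=20)
    print(train.data_set.head())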
Example #5
        priors_dict.update({key: (mu, std)})
original_params = np.log10(original_params)
pysb_sampled_parameter_names = list(priors_dict.keys())

# -----------------------------------------------------------------------------

# -----------------------------------------------------------------------------
# Preparing experimental data
# -----------------------------------------------------------------------------
newdata_1 = IfnData("20190108_pSTAT1_IFN_Bcell")
newdata_2 = IfnData("20190119_pSTAT1_IFN_Bcell")
newdata_3 = IfnData("20190121_pSTAT1_IFN_Bcell")
newdata_4 = IfnData("20190214_pSTAT1_IFN_Bcell")
datalist = [newdata_4, newdata_3, newdata_2, newdata_1]

alignment = DataAlignment()
alignment.add_data(datalist)
alignment.align()
alignment.get_scaled_data()
mean_data = alignment.summarize_data()

# Define posterior function
posterior_obj = IFN_posterior_object(pysb_sampled_parameter_names, Mixed_Model,
                                     mean_data)

# -----------------------------------------------------------------------------


# -----------------------------------------------------------------------------
def dir_setup(dir_name, fit_flag, bootstrap_flag, post_analysis_flag):
    """Used to set up the directory where output files will be saved during
Example #6
import seaborn as sns

# NOTE: the ifnclass.ifndata module path for IfnData and DataAlignment is
# assumed from the ifnclass package layout used in the other examples
from ifnclass.ifndata import IfnData, DataAlignment

if __name__ == '__main__':
    # ------------
    # Process data
    # ------------
    small_1 = IfnData("20190108_pSTAT1_IFN_Small_Bcells")
    small_2 = IfnData("20190119_pSTAT1_IFN_Small_Bcells")
    small_3 = IfnData("20190121_pSTAT1_IFN_Small_Bcells")
    small_4 = IfnData("20190214_pSTAT1_IFN_Small_Bcells")

    large_1 = IfnData("20190108_pSTAT1_IFN_Large_Bcells")
    large_2 = IfnData("20190119_pSTAT1_IFN_Large_Bcells")
    large_3 = IfnData("20190121_pSTAT1_IFN_Large_Bcells")
    large_4 = IfnData("20190214_pSTAT1_IFN_Large_Bcells")


    small_alignment = DataAlignment()
    small_alignment.add_data([small_4, small_3, small_2, small_1])
    small_alignment.align()
    small_alignment.get_scaled_data()
    mean_small_data = small_alignment.summarize_data()

    large_alignment = DataAlignment()
    large_alignment.add_data([large_4, large_3, large_2, large_1])
    large_alignment.align()
    large_alignment.get_scaled_data()
    mean_large_data = large_alignment.summarize_data()

    # ------------
    # Plot data
    # ------------
    alpha_palette = sns.color_palette("deep", 6)
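
    # --- Hedged usage sketch (not part of the original file) -----------------
    # One way the aligned small- and large-cell time courses could be overlaid,
    # reusing the TimecoursePlot API exactly as it appears in Example #1. The
    # 3000 pM IFN-alpha2 dose slice and the output filename are assumptions.
    from ifnclass.ifnplot import TimecoursePlot

    comparison_plot = TimecoursePlot((1, 1))
    comparison_plot.add_trajectory(mean_small_data, 'errorbar', 'o--', (0, 0),
                                   label=r'Small B cells, 3000 pM IFN$\alpha$2',
                                   color=alpha_palette[2],
                                   dose_species='Alpha', doseslice=3000.0)
    comparison_plot.add_trajectory(mean_large_data, 'errorbar', 'o-', (0, 0),
                                   label=r'Large B cells, 3000 pM IFN$\alpha$2',
                                   color=alpha_palette[5],
                                   dose_species='Alpha', doseslice=3000.0)
    comparison_plot.show_figure(show_flag=False, save_flag=True,
                                save_dir='small_vs_large_timecourse.pdf')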
Example #7
def score_params(params):
    # --------------------
    # Set up Model
    # --------------------
    Mixed_Model, DR_method = lm.load_model()
    scale_factor, DR_KWARGS = lm.SCALE_FACTOR, lm.DR_KWARGS
    Mixed_Model.set_parameters(params)

    # Make predictions
    times = [2.5, 5.0, 7.5, 10.0, 20.0, 60.0]
    alpha_doses = [10, 100, 300, 1000, 3000, 10000, 100000]
    beta_doses = [0.2, 6, 20, 60, 200, 600, 2000]

    dra60 = DR_method(times,
                      'TotalpSTAT',
                      'Ia',
                      alpha_doses,
                      parameters={'Ib': 0},
                      sf=scale_factor,
                      **DR_KWARGS)

    drb60 = DR_method(times,
                      'TotalpSTAT',
                      'Ib',
                      beta_doses,
                      parameters={'Ia': 0},
                      sf=scale_factor,
                      **DR_KWARGS)
    sim_df = IfnData('custom',
                     df=pd.concat((dra60.data_set, drb60.data_set)),
                     conditions={
                         'Alpha': {
                             'Ib': 0
                         },
                         'Beta': {
                             'Ia': 0
                         }
                     })

    # --------------------
    # Set up Data
    # --------------------
    newdata_1 = IfnData("20190108_pSTAT1_IFN_Bcell")
    newdata_2 = IfnData("20190119_pSTAT1_IFN_Bcell")
    newdata_3 = IfnData("20190121_pSTAT1_IFN_Bcell")
    newdata_4 = IfnData("20190214_pSTAT1_IFN_Bcell")

    # Aligned data, to get scale factors for each data set
    alignment = DataAlignment()
    alignment.add_data([newdata_4, newdata_3, newdata_2, newdata_1])
    alignment.align()
    alignment.get_scaled_data()
    mean_data = alignment.summarize_data()

    # --------------------
    # Score model
    # --------------------
    sim_df.drop_sigmas()
    mean_data.drop_sigmas()

    # rmse = RMSE(mean_data.data_set.values, sim_df.data_set.values)
    mae = MAE(mean_data.data_set.values, sim_df.data_set.values)
    return mae
    print(df)
    DR_Simulation = IfnData('custom', df=df, conditions={'Alpha': {'Ib': 0}})
    DR_Simulation.drop_sigmas()

    # -----------------------------------------------------------
    # Load several sets of experimental data and align them
    # -----------------------------------------------------------

    # These datasets are already prepared for use in /ifndatabase
    expdata_1 = IfnData("20190214_pSTAT1_IFN_Bcell")
    expdata_2 = IfnData("20190121_pSTAT1_IFN_Bcell")
    expdata_3 = IfnData("20190119_pSTAT1_IFN_Bcell")
    expdata_4 = IfnData("20190108_pSTAT1_IFN_Bcell")

    # Aligned data, to get scale factors for each data set
    alignment = DataAlignment()
    alignment.add_data([expdata_1, expdata_2, expdata_3, expdata_4])
    alignment.align()
    alignment.get_scaled_data()

    # Provides a new IfnData instance with standard error computed as the
    # std. dev. between experimental replicates:
    mean_data = alignment.summarize_data()
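
    # Hedged sanity check (not in the original script): data_set is a pandas
    # DataFrame, so the aligned replicate means can be inspected directly.
    print(mean_data.data_set.head())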

    # ---------------------------------
    # Make dose-response plot
    # ---------------------------------
    colour_palette = sns.color_palette("rocket_r", 6)

    DR_plot = DoseresponsePlot((1, 1))  # shape=(1, 1) since we only want 1 panel