Example #1
def get_heatmap_univariate_category():
    return dbc.Container(
        [
            dcc.Loading(
                [
                    dcc.Store(
                        id="memory_univariate_category",
                    ),
                    dcc.Store(
                        id="memory_scores_univariate_category",
                        data=load_feather(
                            "age_prediction_performances/scores_all_samples_per_participant.feather"
                        ).to_dict(),
                    ),
                ]
            ),
            html.H1("Univariate XWAS - Correlations"),
            html.Br(),
            html.Br(),
            dbc.Row(
                [
                    dbc.Col(
                        [
                            get_controls_tab_univariate_category(),
                            html.Br(),
                            html.Br(),
                        ],
                        width={"size": 3},
                    ),
                    dbc.Col(
                        [
                            dcc.Loading(
                                [
                                    html.H2(id="title_univariate_category"),
                                    dcc.Graph(id="graph_univariate_category", config=DOWNLOAD_CONFIG),
                                ]
                            )
                        ],
                        width={"size": 9},
                    ),
                ]
            ),
            dbc.Row(
                [
                    dbc.Col(
                        [
                            dcc.Loading(
                                [
                                    html.H4("Histogram of the above correlations"),
                                    dcc.Graph(id="histogram_univariate_category", config=DOWNLOAD_CONFIG),
                                ]
                            )
                        ],
                        width={"size": 9, "offset": 3},
                    ),
                ]
            ),
        ],
        fluid=True,
    )
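# A minimal mounting sketch (an assumption, not the project's actual entry point):
# the container returned above can be served as the layout of a standalone Dash app.
# In the real site the page is more likely returned from a tab callback, as in the
# tab-switching example further below.
import dash
import dash_bootstrap_components as dbc

if __name__ == "__main__":
    app = dash.Dash(__name__, external_stylesheets=[dbc.themes.BOOTSTRAP])
    app.layout = get_heatmap_univariate_category()
    app.run_server(debug=True)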
Example #2
def _modify_store_univariate_category(main_category, category):
    if category == "All":
        category = f"All_{main_category}"

    return load_feather(
        f"xwas/univariate_correlations/correlations/categories/correlations_{category}.feather"
    ).to_dict()
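# Hedged wiring sketch: this loader typically feeds the "memory_univariate_category"
# store defined in the layout above. The APP import path and the two dropdown ids
# are assumptions made for illustration only.
from dash.dependencies import Input, Output

from dash_website.app import APP  # assumed location of the shared Dash app object

APP.callback(
    Output("memory_univariate_category", "data"),
    Input("main_category_univariate_category", "value"),  # hypothetical dropdown id
    Input("category_univariate_category", "value"),  # hypothetical dropdown id
)(_modify_store_univariate_category)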
Example #3
def get_univariate_summary():
    return dbc.Container(
        [
            dcc.Loading(
                [
                    dcc.Store(
                        id="memory_univariate_summary",
                        data=load_feather("xwas/univariate_results/summary.feather").to_dict(),
                    )
                ]
            ),
            html.H1("Univariate associations - XWAS"),
            html.Br(),
            html.Br(),
            dbc.Row(
                [
                    dbc.Col([get_controls_tab(), html.Br(), html.Br()], width={"size": 3}),
                    dbc.Col(
                        [
                            html.H2(id="title_univariate_summary"),
                            dcc.Graph(id="graph_univariate_summary", config=DOWNLOAD_CONFIG),
                        ],
                        width={"size": 9},
                    ),
                ]
            ),
        ],
        fluid=True,
    )
def load_correlation(key_in_aws):
    correlation_dimension = load_feather(key_in_aws)
    correlation_dimension.drop(index=correlation_dimension.index[
        correlation_dimension["sample_size"] < 10],
                               inplace=True)

    return correlation_dimension.set_index(["category", "variable"])
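# The drop-by-index call above removes every row whose sample_size is below 10.
# The same filter written with a boolean mask, as a sketch some readers may find
# easier to follow (identical behaviour, different spelling):
def load_correlation_masked(key_in_aws):
    correlation_dimension = load_feather(key_in_aws)
    kept = correlation_dimension[correlation_dimension["sample_size"] >= 10]
    return kept.set_index(["category", "variable"])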
def _modify_store_custom_dimensions(sample_definition):
    score_sample_definition = sample_definition
    if sample_definition == "all_samples_when_possible_otherwise_average":
        score_sample_definition = "all_samples_per_participant"
    return load_feather(
        f"age_prediction_performances/scores_{score_sample_definition}.feather"
    ).to_dict()
def _modify_store_correlations(dimension_subdimension_1,
                               dimension_subdimension_2):
    if dimension_subdimension_2 == "average":
        raise PreventUpdate
    else:
        return load_feather(
            f"xwas/univariate_correlations/correlations/dimensions/correlations_{RENAME_DIMENSIONS.get(dimension_subdimension_1, dimension_subdimension_1)}.feather"
        ).to_dict()
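# Raising PreventUpdate aborts the callback, so the target dcc.Store keeps its
# previous data when the second dimension is "average". The exception comes from
# dash.exceptions:
from dash.exceptions import PreventUpdate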
Example #7
def get_volcano():
    return dbc.Container(
        [
            dcc.Loading(
                dcc.Store(
                    id="memory_volcano_gwas",
                    data=load_feather(
                        "genetics/gwas/size_effects.feather").to_dict())),
            html.H1("Associations - GWAS"),
            html.Br(),
            html.Br(),
            dbc.Row([
                dbc.Col(
                    [
                        get_controls_volcano_gwas(),
                        html.Br(),
                        html.Br(),
                    ],
                    width={"size": 3},
                ),
                dbc.Col(
                    [
                        dcc.Loading([
                            html.H2("Vocalno plot"),
                            dcc.Graph(id="graph_volcano_gwas",
                                      config=DOWNLOAD_CONFIG),
                        ])
                    ],
                    width={"size": 6},
                ),
            ]),
            dbc.Row([
                dbc.Col(
                    dcc.Loading([
                        dash_table.DataTable(
                            id="table_volcano_gwas",
                            columns=[{
                                "id": key,
                                "name": name
                            } for key, name in VOLCANO_TABLE_COLUMNS.items()],
                            style_cell={"textAlign": "left"},
                            sort_action="custom",
                            sort_mode="single",
                        )
                    ]),
                    width={
                        "size": 8,
                        "offset": 3
                    },
                )
            ]),
        ],
        fluid=True,
    )
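# Because the DataTable above sets sort_action="custom", the app must sort the rows
# itself whenever the table emits its sort_by property. A hedged sketch of such a
# callback, assuming the rows come from the "memory_volcano_gwas" store defined in
# the layout; the APP import path is an assumption.
import pandas as pd
from dash.dependencies import Input, Output, State

from dash_website.app import APP  # assumed location of the shared Dash app object


@APP.callback(
    Output("table_volcano_gwas", "data"),
    Input("table_volcano_gwas", "sort_by"),
    State("memory_volcano_gwas", "data"),
)
def _sort_table_volcano_gwas(sort_by, data):
    table = pd.DataFrame(data)
    if sort_by:  # sort_by looks like [{"column_id": ..., "direction": "asc" | "desc"}]
        table.sort_values(
            sort_by[0]["column_id"],
            ascending=(sort_by[0]["direction"] == "asc"),
            inplace=True,
        )
    return table.to_dict("records")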
def get_average_correlations():
    correlations_raw = load_feather(
        f"xwas/univariate_correlations/correlations.feather").set_index([
            "dimension_1", "subdimension_1", "dimension_2", "subdimension_2",
            "category"
        ])
    correlations_raw.columns = pd.MultiIndex.from_tuples(
        list(map(eval, correlations_raw.columns.tolist())),
        names=["subset_method", "correlation_type"])
    correlations_raw.reset_index(inplace=True)
    for index_dimension in [1, 2]:
        correlations_raw[
            f"squeezed_dimension_{index_dimension}"] = correlations_raw[
                f"dimension_{index_dimension}"] + correlations_raw[
                    f"subdimension_{index_dimension}"].replace("*", "")
    correlations_raw = correlations_raw.drop(columns=[
        "dimension_1", "subdimension_1", "dimension_2", "subdimension_2"
    ]).set_index(["category", "squeezed_dimension_1", "squeezed_dimension_2"])

    list_indexes = []
    for dimension in ["Musculoskeletal"]:
        for category in FULL_CATEGORY:
            list_indexes.append([dimension, category])
    indexes = pd.MultiIndex.from_tuples(list_indexes,
                                        names=["dimension", "category"])

    list_columns = []
    for subset_method in ["union"]:
        for correlation_type in ["pearson"]:
            for observation in ["mean", "std"]:
                list_columns.append(
                    [subset_method, correlation_type, observation])
    columns = pd.MultiIndex.from_tuples(
        list_columns,
        names=["subset_method", "correlation_type", "observation"])

    averages_correlations = pd.DataFrame(None, index=indexes, columns=columns)

    for category in FULL_CATEGORY:
        correlations_category = correlations_raw.loc[category,
                                                     ("union", "pearson")]

        averages_correlations.loc[("Musculoskeletal", category), (
            "union", "pearson",
            "mean")] = correlations_category.loc[PAIRS_SUBDIMENSIONS].mean()
        averages_correlations.loc[("Musculoskeletal", category), (
            "union", "pearson",
            "std")] = correlations_category.loc[PAIRS_SUBDIMENSIONS].std()

    averages_correlations.columns = map(str,
                                        averages_correlations.columns.tolist())
    return averages_correlations.reset_index()
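# The map(str, ...) call above stringifies the MultiIndex columns because feather
# cannot store them. A sketch of the matching reader, assuming the frame returned
# above is what sits behind "xwas/univariate_correlations/averages_correlations.feather"
# (the key loaded into memory_univariate_average below); it mirrors the eval-based
# reconstruction applied to correlations.feather at the top of this function.
def load_average_correlations():
    averages = load_feather(
        "xwas/univariate_correlations/averages_correlations.feather"
    ).set_index(["dimension", "category"])
    averages.columns = pd.MultiIndex.from_tuples(
        list(map(eval, averages.columns.tolist())),
        names=["subset_method", "correlation_type", "observation"],
    )
    return averages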
def get_univariate_average_bars():
    return dbc.Container(
        [
            dcc.Loading([
                dcc.Store(
                    id="memory_univariate_average",
                    data=load_feather(
                        "xwas/univariate_correlations/averages_correlations.feather"
                    ).to_dict(),
                ),
                dcc.Store(id="memory_correlations_univariate_average"),
            ]),
            html.H1("Univariate XWAS - Correlations"),
            html.Br(),
            html.Br(),
            dbc.Row([
                dbc.Col(
                    [
                        get_controls_tab_univariate_average(),
                        html.Br(),
                        html.Br(),
                    ],
                    width={"size": 3},
                ),
                dbc.Col(
                    [
                        dcc.Loading([
                            html.H2(id="title_univariate_average"),
                            dcc.Graph(id="graph_univariate_average",
                                      config=DOWNLOAD_CONFIG),
                        ])
                    ],
                    width={"size": 9},
                    style={"overflowX": "scroll"},
                ),
            ]),
        ],
        fluid=True,
    )
Example #10
def _modify_memory_scalars_features(dimension, subdimension, sub_subdimension):
    return load_feather(
        f"feature_importances/scalars/{dimension}_{subdimension}_{sub_subdimension}.feather"
    ).to_dict()
Example #11
            ascending=is_ascending,
            inplace=True)

    return (
        table_features[FEATURES_TABLE_COLUMNS].round(3).to_dict("records"),
        table_correlations[FEATURES_CORRELATIONS_TABLE_COLUMNS].round(3).to_dict("records"),
    )


LAYOUT = dbc.Container(
    [
        dcc.Loading([
            dcc.Store(id="memory_features"),
            dcc.Store(
                id="memory_scores",
                data=load_feather(
                    "age_prediction_performances/scores_all_samples_per_participant.feather"
                ).to_dict(),
            ),
        ]),
        html.H1("Model interpretability - Scalars"),
        html.Br(),
        html.Br(),
        dbc.Row([
            dbc.Col(
                [
                    dbc.Card(get_controls_scalars_features()),
                    html.Br(),
                    html.Br(),
                    dbc.Card(get_controls_table_scalars_features()),
                ],
                width={"size": 5},
    "MusculoskeletalKnees": ["Musculoskeletal"],
    "MusculoskeletalScalars": ["Musculoskeletal"],
    "MusculoskeletalSpine": ["Musculoskeletal"],
    "PhysicalActivity": [],
}

FULL_CATEGORY = (MAIN_CATEGORIES_TO_CATEGORIES["All"] +
                 ["Phenotypic", "Genetics"] + [
                     f"All_{main_category}"
                     for main_category in MAIN_CATEGORIES_TO_CATEGORIES.keys()
                 ])

if __name__ == "__main__":
    correlations_raw = load_feather(
        f"xwas/univariate_correlations/correlations.feather").set_index([
            "dimension_1", "subdimension_1", "dimension_2", "subdimension_2",
            "category"
        ])
    correlations_raw.columns = pd.MultiIndex.from_tuples(
        list(map(eval, correlations_raw.columns.tolist())),
        names=["subset_method", "correlation_type"])
    correlations_raw.reset_index(inplace=True)
    for index_dimension in [1, 2]:
        correlations_raw[
            f"squeezed_dimension_{index_dimension}"] = correlations_raw[
                f"dimension_{index_dimension}"] + correlations_raw[
                    f"subdimension_{index_dimension}"].replace("*", "")
    correlations_raw = correlations_raw.drop(columns=[
        "dimension_1", "subdimension_1", "dimension_2", "subdimension_2"
    ]).set_index(["category", "squeezed_dimension_1", "squeezed_dimension_2"])
Example #13
def get_data():
    return load_feather("datasets/time_series/information.feather").to_dict()
Example #14
def _modify_store_features_multivariate(dimension_subdimension, category):
    return load_feather(
        f"xwas/multivariate_feature_importances/dimension_category/features_{RENAME_DIMENSIONS.get(dimension_subdimension, dimension_subdimension)}_{category}.feather"
    ).to_dict()
Example #15
def get_data():
    return load_feather("datasets/videos/information.feather").to_dict()
Example #16
def _modify_store_lower_comparison(uni_or_multi, category):
    return load_feather(
        f"xwas/{uni_or_multi}_correlations/correlations/categories/correlations_{category}.feather"
    ).to_dict()
def _modify_store_dimension(dimension_subdimension):
    return load_feather(
        f"xwas/univariate_correlations/correlations/dimensions/correlations_{RENAME_DIMENSIONS.get(dimension_subdimension, dimension_subdimension)}.feather"
    ).to_dict()
Example #18
from dash_website.utils.aws_loader import load_feather, upload_file
from dash_website import DIMENSIONS, RENAME_DIMENSIONS, MAIN_CATEGORIES_TO_CATEGORIES

SQUEEZED_DIMENSIONS = load_feather(
    "xwas/squeezed_dimensions_participant_and_time_of_examination.feather"
).set_index(["squeezed_dimensions"])

if __name__ == "__main__":
    correlations = load_feather(
        "xwas/univariate_correlations/correlations.feather")

    correlations.set_index(["dimension_1", "subdimension_1"], inplace=True)

    for squeezed_dimension in DIMENSIONS:
        dimension_1, subdimension_1 = SQUEEZED_DIMENSIONS.loc[
            squeezed_dimension, ["dimension", "subdimension"]]
        correlations.loc[(
            dimension_1, subdimension_1
        )].reset_index(drop=True).rename(columns={
            "dimension_2": "dimension",
            "subdimension_2": "subdimension"
        }).to_feather(
            f"all_data/xwas/univariate_correlations/correlations/dimensions/correlations_{RENAME_DIMENSIONS.get(squeezed_dimension, squeezed_dimension)}.feather"
        )
        upload_file(
            f"all_data/xwas/univariate_correlations/correlations/dimensions/correlations_{RENAME_DIMENSIONS.get(squeezed_dimension, squeezed_dimension)}.feather",
            f"xwas/univariate_correlations/correlations/dimensions/correlations_{RENAME_DIMENSIONS.get(squeezed_dimension, squeezed_dimension)}.feather",
        )

    correlations.reset_index(inplace=True)
    correlations.set_index("category", inplace=True)
from dash_website.utils.aws_loader import load_feather
from dash_website import DIMENSIONS, MAIN_CATEGORIES_TO_CATEGORIES

DICT_TO_CHANGE_DIMENSIONS = {"ImmuneSystem": "BloodCells"}
DICT_TO_CHANGE_CATEGORIES = {
    "HeartSize": "HeartFunction",
    "AnthropometryImpedance": "Impedance",
    "AnthropometryBodySize": "Anthropometry",
    # Main categories
    "All_Phenotypes": "All_ClinicalPhenotypes",
}

if __name__ == "__main__":
    correlations = load_feather(
        "xwas/multivariate_correlations/correlations/correlations.feather")

    correlations_cleaned_dimensions_1 = correlations.set_index(
        ["dimension_1", "dimension_2",
         "category"]).rename(index=DICT_TO_CHANGE_DIMENSIONS,
                             level="dimension_1")
    correlations_cleaned_dimensions = correlations_cleaned_dimensions_1.rename(
        index=DICT_TO_CHANGE_DIMENSIONS, level="dimension_2")
    correlations_cleaned = correlations_cleaned_dimensions.rename(
        index=DICT_TO_CHANGE_CATEGORIES, level="category")
    correlations_cleaned.reset_index().to_feather(
        "data/xwas/multivariate_correlations/correlations/correlations.feather"
    )

    for dimension in DIMENSIONS:
        if dimension in DICT_TO_CHANGE_DIMENSIONS.keys():
            dimension = DICT_TO_CHANGE_DIMENSIONS[dimension]
Example #20
def _modify_store_scalars(dimension, subdimension, sub_subdimension):
    return load_feather(f"datasets/scalars/{dimension}_{subdimension}_{sub_subdimension}.feather").to_dict()
import pandas as pd
import numpy as np
from tqdm import tqdm

from dash_website.utils.aws_loader import load_feather
from dash_website import DIMENSIONS, MAIN_CATEGORIES_TO_CATEGORIES, RENAME_DIMENSIONS

SQUEEZED_DIMENSIONS = load_feather(
    "xwas/squeezed_dimensions_participant_and_time_of_examination.feather"
).set_index("squeezed_dimensions")


def load_correlation(key_in_aws):
    correlation_dimension = load_feather(key_in_aws)
    correlation_dimension.drop(index=correlation_dimension.index[
        correlation_dimension["sample_size"] < 10],
                               inplace=True)

    return correlation_dimension.set_index(["category", "variable"])


if __name__ == "__main__":
    list_indexes = []
    for squeezed_dimension_1 in DIMENSIONS:
        for squeezed_dimension_2 in DIMENSIONS:
            for category in MAIN_CATEGORIES_TO_CATEGORIES["All"] + [
                    f"All_{main_category}"
                    for main_category in MAIN_CATEGORIES_TO_CATEGORIES.keys()
            ]:
                list_indexes.append(
                    [squeezed_dimension_1, squeezed_dimension_2, category])
Example #22
def get_data_all_dimensions(sample_definition):
    return load_feather(
        f"correlation_between_accelerated_aging_dimensions/all_dimensions_{sample_definition}.feather"
    ).to_dict()
            "t": 0
        },
    )

    return (
        fig,
        f"Average heritability = {heritability['h2'].mean().round(3)} +- {heritability['h2'].std().round(3)}",
    )


LAYOUT = dbc.Container(
    [
        dcc.Loading(
            dcc.Store(
                id="memory_heritability",
                data=load_feather(
                    "genetics/heritability/heritability.feather").to_dict())),
        html.H1("Heritability - GWAS"),
        html.Br(),
        html.Br(),
        dbc.Row([
            dbc.Col(
                [
                    get_controls_heritability(),
                    html.Br(),
                    html.Br(),
                ],
                width={"size": 3},
            ),
            dbc.Col(
                [
                    dcc.Loading([
Example #24
import pandas as pd
import numpy as np

from dash_website.utils.aws_loader import load_feather
from dash_website import DOWNLOAD_CONFIG, ALGORITHMS
from dash_website.age_prediction_performances import SCORES

if __name__ == "__main__":
    metric = "rmse"
    scores = load_feather(
        f"age_prediction_performances/scores_all_samples_per_participant.feather"
    ).set_index(["dimension", "subdimension", "sub_subdimension"])

    scores = scores.loc[[
        ("BloodCells", "BloodCount", "Scalars"),
        ("Biochemistry", "Blood", "Scalars"),
        ("Biochemistry", "Urine", "Scalars"),
    ]]

    import plotly.graph_objs as go

    sorted_dimensions = scores.index.drop_duplicates()

    x_positions = pd.DataFrame(np.arange(5, 10 * len(sorted_dimensions) + 5,
                                         10),
                               index=sorted_dimensions,
                               columns=["x_position"])

    fig = go.Figure()
    fig.update_layout(
        xaxis={
Example #25
            inplace=True)

    return (
        table_features[FEATURES_TABLE_COLUMNS].round(5).to_dict("records"),
        table_correlations[FEATURES_CORRELATIONS_TABLE_COLUMNS].round(5).to_dict("records"),
    )


LAYOUT = dbc.Container(
    [
        dcc.Loading([
            dcc.Store(id="memory_features_multivariate"),
            dcc.Store(
                id="memory_scores_features_multivariate",
                data=load_feather(
                    f"xwas/multivariate_results/scores.feather",
                    columns=[
                        "category", "dimension", "r2", "std", "algorithm"
                    ],
                ).to_dict(),
            ),
        ]),
        html.H1("Accelerated aging prediction interpretability - XWAS"),
        html.Br(),
        html.Br(),
        dbc.Row([
            dbc.Col(
                [
                    get_controls_features_multivariate(),
                    html.Br(),
                    html.Br(),
                    get_controls_table_features_multivariate(),
                ],
    squeezed_dimensions.reset_index(inplace=True)
    squeezed_dimensions["squeezed_dimensions"] = squeezed_dimensions[
        "dimension"] + squeezed_dimensions["subdimension"].replace("*", "")
    squeezed_dimensions["squeezed_dimensions"].replace(
        {
            "*": "set",
            "*instances01": "set_instances01",
            "*instances1.5x": "set_instances1.5x",
            "*instances23": "set_instances23",
        },
        inplace=True,
    )
    squeezed_dimensions.set_index("squeezed_dimensions", inplace=True)

    every_correlation = load_feather(
        f"xwas/univariate_correlations/correlations/correlations.feather"
    ).set_index("category")

    for category in tqdm(EVERY_CATEGORIES):
        correlations = (every_correlation.loc[category].reset_index(
            drop=True).rename(
                columns={
                    "dimension_1": "dimensions_1",
                    "dimension_2": "dimensions_2"
                }))

        for idx_dimension in ["1", "2"]:
            correlations.set_index(f"dimensions_{idx_dimension}", inplace=True)
            correlations[f"dimension_{idx_dimension}"] = squeezed_dimensions[
                "dimension"]
            correlations[
Example #27
            f"../data/feature_importances/videos/{chamber_type}_chambers/{sex}/{age_group}/{aging_rate}.gif",
            still=
            f"../data/feature_importances/videos/{chamber_type}_chambers/{sex}/{age_group}/{aging_rate}.png",
        ),
        style={"padding-left": 400},
    )
    return gif_display, title


LAYOUT = dbc.Container(
    [
        dcc.Loading([
            dcc.Store(
                id="memory_videos_features",
                data=load_feather(
                    "feature_importances/videos/information.feather").to_dict(
                    ),
            ),
            dcc.Store(
                id="memory_scores_features",
                data=load_feather(
                    "age_prediction_performances/scores_all_samples_per_participant.feather"
                ).to_dict(),
            ),
        ]),
        html.H1("Model interpretability - Videos"),
        html.Br(),
        html.Br(),
        dbc.Row(get_controls_videos_features(), justify="center"),
        dbc.Row(html.Br()),
        dbc.Row(html.H2(id="title_videos_features"), justify="center"),

@APP.callback(Output("tab_content_multivariate_results", "children"),
              Input("tab_manager_multivariate_results", "active_tab"))
def _fill_tab_multivariate_results(active_tab):
    if active_tab == "tab_heatmap_multivariate_results":
        return get_heatmap_multivariate_results()
    else:  # active_tab == "tab_bar_plot"
        return get_bar_plot_multivariate_results()


LAYOUT = html.Div([
    dcc.Loading(
        dcc.Store(
            id="memory_scores_multivariate_results",
            data=load_feather(
                "xwas/multivariate_results/scores.feather").to_dict(),
        )),
    dbc.Tabs(
        [
            dbc.Tab(label="View heatmap",
                    tab_id="tab_heatmap_multivariate_results"),
            dbc.Tab(label="View bar plot",
                    tab_id="tab_bar_plot_multivariate_results"),
        ],
        id="tab_manager_multivariate_results",
        active_tab="tab_heatmap_multivariate_results",
    ),
    html.Div(id="tab_content_multivariate_results"),
])
from dash_website.utils.aws_loader import load_feather
from dash_website import DIMENSIONS, MAIN_CATEGORIES_TO_CATEGORIES

DICT_TO_CHANGE_DIMENSIONS = {"ImmuneSystem": "BloodCells"}
DICT_TO_CHANGE_CATEGORIES = {
    "HeartSize": "HeartFunction",
    "AnthropometryImpedance": "Impedance",
    "AnthropometryBodySize": "Anthropometry",
    # Main categories
    "All_Phenotypes": "All_ClinicalPhenotypes",
}


if __name__ == "__main__":
    linear_correlations = load_feather("xwas/univariate_results/linear_correlations.feather")

    linear_correlations_cleaned_dimensions = linear_correlations.set_index(["dimension", "category"]).rename(
        index=DICT_TO_CHANGE_DIMENSIONS, level="dimension"
    )
    linear_correlations_cleaned = linear_correlations_cleaned_dimensions.rename(
        index=DICT_TO_CHANGE_CATEGORIES, level="category"
    )
    linear_correlations_cleaned.reset_index().to_feather("data/xwas/univariate_results/linear_correlations.feather")

    for dimension in DIMENSIONS:
        if dimension in DICT_TO_CHANGE_DIMENSIONS.keys():
            dimension = DICT_TO_CHANGE_DIMENSIONS[dimension]
        linear_correlations_cleaned.loc[dimension].reset_index().to_feather(
            f"data/xwas/univariate_results/linear_correlations_{dimension}.feather"
        )
Example #30
        image_to_display = Image.alpha_composite(
            composite_image, Image.fromarray(images[display_mode[2]]))

    buffer = BytesIO()
    image_to_display.save(buffer, format="png")

    encoded_image = base64.b64encode(buffer.getvalue())

    return f"data:image/png;base64,{encoded_image.decode()}"


LAYOUT = dbc.Container(
    [
        dcc.Loading([
            dcc.Store(id="memory_images_features",
                      data=load_feather(
                          "datasets/images/information.feather").to_dict()),
            dcc.Store(
                id="memory_scores_features",
                data=load_feather(
                    "age_prediction_performances/scores_all_samples_per_participant.feather"
                ).to_dict(),
            ),
        ]),
        html.H1("Model interpretability - Images"),
        html.Br(),
        html.Br(),
        dbc.Row(dbc.Col(dbc.Card(get_controls_images_features())),
                justify="center"),
        dbc.Row(html.Br()),
        dbc.Row(html.H2(id="title_images_features"), justify="center"),
        dbc.Row(html.Br()),