Example #1
0
    def pushButton_testing_roc_curve_clicked(self):
        """Plot a ROC curve on the test set for every selected saved model."""
        cfg = self.app_model.model

        test_set = Dataset.get_dataset(
            cfg['testing_dataset_test_directory'],
            cfg['dataset_data_augmentation_test_enabled'],
            cfg['dataset_data_augmentation_test'])
        dataloader_test = DataLoader(test_set,
                                     batch_size=cfg['training_batch_size'],
                                     shuffle=True)

        models_dir = cfg['testing_saved_models_directory']
        for model_id in cfg['testing_saved_models_selected']:
            # Each saved model is stored together with the class list it was
            # trained on; that list is what the ROC plot is labelled with.
            model, loaded_classes = ModelIO.load(
                os.path.join(models_dir, model_id))
            ModelEvaluation.plot_roc_curve(model, dataloader_test,
                                           loaded_classes)
Example #2
0
    def pushButton_testing_confusion_matrix_clicked(self):
        """Plot a confusion matrix on the test set for every selected saved model.

        Fix/consistency: use the class labels stored alongside each saved
        model (as the ROC-curve handler does) instead of the test dataset's
        folder-derived ``dataset.classes``.  The model's output indices are
        defined by the classes it was trained on, so labelling the matrix
        with the dataset's folder order could mislabel rows/columns whenever
        the two orders differ.
        """
        cfg = self.app_model.model

        dataloader_test = DataLoader(
            Dataset.get_dataset(
                cfg['testing_dataset_test_directory'],
                cfg['dataset_data_augmentation_test_enabled'],
                cfg['dataset_data_augmentation_test']),
            batch_size=cfg['training_batch_size'],
            shuffle=True)

        for model_id in cfg['testing_saved_models_selected']:
            # Keep the loaded class list — it matches the model's outputs.
            model, loaded_classes = ModelIO.load(
                os.path.join(cfg['testing_saved_models_directory'],
                             model_id))
            y_true, y_pred, probabilities = ModelEvaluation.get_predictions(
                model, dataloader_test)
            PrettyPrintConfusionMatrix.plot(
                y_true, y_pred, classes=loaded_classes)
Example #3
0
    def run(self):
        """Worker entry point: build the dataloaders, train every configured
        model, then reset the premature-stop flag and signal completion."""
        m = self.app_model.model

        dataset, dataset_dataloader = Dataset.create_dataloader_for_neural_network(
            dataset_train_test_valid_directory=m['dataset_train_test_valid_directory'],
            dataset_train_dir_name=m['dataset_train_dir_name'],
            dataset_test_dir_name=m['dataset_test_dir_name'],
            dataset_is_test_set_enabled=m['dataset_is_test_set_enabled'],
            dataset_valid_dir_name=m['dataset_valid_dir_name'],
            dataset_data_augmentation_train_enabled=m['dataset_data_augmentation_train_enabled'],
            dataset_data_augmentation_train=m['dataset_data_augmentation_train'],
            dataset_data_augmentation_test_enabled=m['dataset_data_augmentation_test_enabled'],
            dataset_data_augmentation_test=m['dataset_data_augmentation_test'],
            dataset_data_augmentation_valid_enabled=m['dataset_data_augmentation_valid_enabled'],
            dataset_data_augmentation_valid=m['dataset_data_augmentation_valid'],
            training_batch_size=m['training_batch_size'])

        # These directory names were looked up with .get() in the original
        # call, i.e. a missing key silently becomes None.
        optional_keys = ('dataset_test_dir_name', 'dataset_train_dir_name',
                         'dataset_valid_dir_name')
        # Training settings use direct indexing: a missing key should raise.
        required_keys = (
            'training_cnn_models_to_train', 'training_criterion',
            'training_dropout', 'training_epochs_early_stopping',
            'training_epochs_count', 'training_feature_extract',
            'training_optimizer', 'training_learning_rate',
            'training_lr_gamma', 'training_lr_step_size',
            'training_model_output_directory', 'training_momentum',
            'training_save_best_model_enabled', 'training_scheduler',
            'training_use_gpu', 'training_use_early_stopping',
            'training_use_pretrained_models', 'training_use_softmax',
            'training_weight_decay')

        # UI callbacks the training loop reports progress through.
        signals = {
            'console_append': self.console_append,
            'console_clear': self.console_clear,
            'console_replace': self.console_replace,
            'label_epoch_current_total_text_changed': self.label_epoch_current_total_text_changed,
            'label_training_model_name_text_changed': self.label_training_model_name_text_changed,
            'progressBar_training_set_value_changed': self.progressBar_training_set_value_changed,
            'plot_train_valid_acc_loss_graph': self.plot_train_valid_acc_loss_graph,
            'premature_training_end_triggered': self.premature_training_end_triggered,
        }

        ModelTraining.train_models(
            dataset_dataloader=dataset_dataloader,
            signals=signals,
            **{key: m.get(key) for key in optional_keys},
            **{key: m[key] for key in required_keys})

        m['premature_training_end_triggered'] = False
        self.finished.emit()
Example #4
0
####         CREATE INITIAL DATA SET(S)          ####
#####################################################
# Python 2 script section: record the Greenland grid's dimensions and
# projected extents from the HDF5 file, then build a plot widget per layer.
print 'Creating data sets and loading their data/plots'

# Read grid shape and projected-coordinate extents, then release the file.
# NOTE(review): `map` shadows the `map` builtin — presumably a dict defined
# earlier in this script; confirm against the full file.
dataSet = h5py.File('./data/GreenlandInBedCoord.h5', 'r')
map['x1'] = len(dataSet['bed'][:][0])  # columns in the 'bed' grid
map['y1'] = len(dataSet['bed'][:])  # rows in the 'bed' grid
map['proj_x1'] = dataSet['x'][:][-1]  # last projected x coordinate
map['proj_y1'] = dataSet['y'][:][-1]  # last projected y coordinate
dataSet.close()

vpts = [
]  # holds [x,y,v] values, where x,y are the coordinates and v is the velocity magnitude at those coordinates
intLines = []  # holds integration lines

# One Dataset + plot widget per data layer; only the velocity widget is made
# current and given an explicit view range.
velocity = Dataset('velocity', greenPlotPen, draw=True)
iiContainer.addWidget(velocity.plotWidget)
iiContainer.setCurrentWidget(velocity.plotWidget)
velocity.plotWidget.getPlotItem().getViewBox().setRange(xRange=[0, 10018],
                                                        yRange=[0, 17964],
                                                        padding=0.1)

smb = Dataset('smb', redPlotPen, draw=True)
iiContainer.addWidget(smb.plotWidget)

bed = Dataset('bed', bluePlotPen, draw=True)
iiContainer.addWidget(bed.plotWidget)

surface = Dataset('surface', greyPlotPen, draw=True)
iiContainer.addWidget(surface.plotWidget)
Example #5
0
# Script setup: build the car's websocket endpoints, connect, and initialise
# the bookkeeping globals used by the state-handler callback below.
# Earlier endpoint candidates kept for reference:
#car_link = "ws://{}:{}".format(car_data['address'], car_data['port'])
#car_link = "ws://{}:{}".format("192.168.137.2", "8000")
#car_link = "ws://{}:{}".format("192.168.0.120", "8000")
car_link = "ws://{}:{}".format("192.168.8.103", "8000")  # hard-coded car address

# Derived endpoints: commands go to /action, telemetry comes from /state.
action_link = "{}/action".format(car_link)
state_link = "{}/state".format(car_link)

logging.debug("Action Link: " + action_link)
logging.debug("State Link: " + state_link)

car = Car(action_link, url_state=state_link)

# Globals mutated by handle_state() to track request throughput and buffer
# incoming data vectors.
start_time = time()  # wall-clock start for requests/second accounting
total_requests = 1
dataset = Dataset()
previous_datavector = None
timer = [0, 0]  # per-slot request counters; handle_state alternates slots
timer_index = 0
dataset_time = None
dataset_queue = Queue()

def handle_state(data, ws):
    global total_requests, start_time, dataset, previous_datavector, timer_index, timer, dataset_time, dataset_queue
    current_datavector = pickle.loads(data)

    # Request Per Second Checker
    total_requests += 1
    timer[timer_index] += 1
    elapsed = time() - start_time
Example #6
0
def main(loc_id, loc_name, output_version):
    print('Reading in short-term outcomes...')
    ## Read in short-term outcomes
    # region -------------------------------------------------------------------

    # Durations and proportions
    dp = pd.read_csv(
        '{}WORK/12_bundle/covid/data/long_covid/long_covid_proportions_durations_with_overlaps.csv'
        .format(roots['j']))

    # Mild/Moderate
    print('  mild/moderate...')
    midmod = Dataset(loc_id,
                     loc_name,
                     output_version,
                     'midmod',
                     nf_type='long')

    # Hospital
    print('  hospital...')
    hospital = Dataset(loc_id,
                       loc_name,
                       output_version,
                       'hsp_admit',
                       nf_type='long')

    # Icu
    print('  icu...')
    icu = Dataset(loc_id,
                  loc_name,
                  output_version,
                  'icu_admit',
                  nf_type='long')

    # endregion ----------------------------------------------------------------

    print('Calculating mild/moderate incidence & prevalence...')
    ## Mild/Moderate Incidence & Prevalence
    # region -------------------------------------------------------------------
    # Shift hospitalizations 7 days
    lag_hsp = copy.deepcopy(hospital)
    lag_hsp.data = lag_hsp.data.drop(columns=['hospital_deaths'])
    lag_hsp.data.date = lag_hsp.data.date + pd.to_timedelta(
        roots['defaults']['symp_to_hsp_admit_duration'], unit='D')

    # Merge midmod and lag_hsp
    midmod.data = pd.merge(
        midmod.data,
        lag_hsp.data,
        how='left',
        on=['location_id', 'age_group_id', 'sex_id', 'draw_var', 'date'])
    del lag_hsp

    # mild/moderate at risk number = (mild/moderate incidence - hospital admissions|7 days later) |
    #                                 shift forward by {incubation period + mild/moderate duration|no hospital}
    midmod.data[
        'midmod_risk_num'] = midmod.data.midmod_inc - midmod.data.hospital_inc
    midmod.data.date = midmod.data.date + pd.to_timedelta(
        (roots['defaults']['incubation_period'] +
         roots['defaults']['midmod_duration_no_hsp']),
        unit='D')

    # Calculate the incidence of each symptom and overlap, regardless of co-occurrence of additional symptoms (not mutually exclusive)
    # mild/moderate long-term incidence = mild/moderate number at risk * proportion of mild/moderate with each long-term symptom cluster
    midmod.data['midmod_cog_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'cognitive') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])
    midmod.data['midmod_fat_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'fatigue') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])
    midmod.data['midmod_resp_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'respiratory') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])
    midmod.data['midmod_cog_fat_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'cognitive_fatigue') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])
    midmod.data['midmod_cog_resp_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'cognitive_respiratory') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])
    midmod.data['midmod_fat_resp_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'fatigue_respiratory') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])
    midmod.data['midmod_cog_fat_resp_inc'] = (
        midmod.data.midmod_risk_num *
        dp.loc[(dp.outcome == 'cognitive_fatigue_respiratory') &
               (dp.population == 'midmod'), 'proportion_mean'].values[0])

    # Creating mutually exclusive categories of symptoms
    # cog_inc = cog_inc - (cog_fat_inc - cog_fat_resp_inc) - (cog_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    midmod.data.midmod_cog_inc = (midmod.data.midmod_cog_inc -
                                  (midmod.data.midmod_cog_fat_inc -
                                   midmod.data.midmod_cog_fat_resp_inc) -
                                  (midmod.data.midmod_cog_resp_inc -
                                   midmod.data.midmod_cog_fat_resp_inc) -
                                  midmod.data.midmod_cog_fat_resp_inc)

    # fat_inc = fat_inc - (cog_fat_inc - cog_fat_resp_inc) -  (fat_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    midmod.data.midmod_fat_inc = (midmod.data.midmod_fat_inc -
                                  (midmod.data.midmod_cog_fat_inc -
                                   midmod.data.midmod_cog_fat_resp_inc) -
                                  (midmod.data.midmod_fat_resp_inc -
                                   midmod.data.midmod_cog_fat_resp_inc) -
                                  midmod.data.midmod_cog_fat_resp_inc)

    # resp_inc = resp_inc - (fat_resp_inc - cog_fat_resp_inc) - (cog_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    midmod.data.midmod_resp_inc = (midmod.data.midmod_resp_inc -
                                   (midmod.data.midmod_fat_resp_inc -
                                    midmod.data.midmod_cog_fat_resp_inc) -
                                   (midmod.data.midmod_cog_resp_inc -
                                    midmod.data.midmod_cog_fat_resp_inc) -
                                   midmod.data.midmod_cog_fat_resp_inc)

    # cog_fat_inc = cog_fat_inc - cog_fat_resp_inc
    midmod.data.midmod_cog_fat_inc = (midmod.data.midmod_cog_fat_inc -
                                      midmod.data.midmod_cog_fat_resp_inc)

    # cog_resp_inc = cog_resp_inc - cog_fat_resp_inc
    midmod.data.midmod_cog_resp_inc = (midmod.data.midmod_cog_resp_inc -
                                       midmod.data.midmod_cog_fat_resp_inc)

    # fat_resp_inc = fat_resp_inc - cog_fat_resp_inc
    midmod.data.midmod_fat_resp_inc = (midmod.data.midmod_fat_resp_inc -
                                       midmod.data.midmod_cog_fat_resp_inc)

    # mild/moderate long-term prevalence = mild/moderate long-term incidence * [duration]
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='cognitive',
                            calc_col_stub='midmod_cog_')
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='fatigue',
                            calc_col_stub='midmod_fat_')
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='respiratory',
                            calc_col_stub='midmod_resp_')
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='cognitive_fatigue',
                            calc_col_stub='midmod_cog_fat_')
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='cognitive_respiratory',
                            calc_col_stub='midmod_cog_resp_')
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='fatigue_respiratory',
                            calc_col_stub='midmod_fat_resp_')
    midmod.data = calc_prev(df=midmod.data,
                            dp=dp,
                            dst_population='midmod',
                            dst_outcome='cognitive_fatigue_respiratory',
                            calc_col_stub='midmod_cog_fat_resp_')

    # Drop unneeded cols
    midmod.data = midmod.data.drop(
        columns=['midmod_inc', 'hospital_inc', 'midmod_risk_num'])

    # endregion ----------------------------------------------------------------

    print('Calculating severe incidence and prevalence...')
    ## Severe Incidence & Prevalence
    # region -------------------------------------------------------------------

    # Shift icu admissions
    lag_icu = copy.deepcopy(icu)
    lag_icu.data = lag_icu.data.drop(columns=['icu_deaths'])
    lag_icu.data.date = lag_icu.data.date + pd.to_timedelta(
        roots['defaults']['icu_to_death_duration'], unit='D')

    # Shift hospital deaths
    lag_hsp = copy.deepcopy(hospital)
    lag_hsp.data = lag_hsp.data.drop(columns=['hospital_inc'])
    lag_hsp.data.date = lag_hsp.data.date + pd.to_timedelta(
        roots['defaults']['hsp_no_icu_death_duration'], unit='D')

    # Merge lagged datasets
    lag = pd.merge(
        lag_icu.data,
        lag_hsp.data,
        how='left',
        on=['location_id', 'age_group_id', 'sex_id', 'draw_var', 'date'])
    del lag_icu, lag_hsp
    hospital.data = pd.merge(
        hospital.data.drop(columns=['hospital_deaths']),
        lag,
        how='left',
        on=['location_id', 'age_group_id', 'sex_id', 'draw_var', 'date'])
    del lag

    # severe at risk number = (hospital admissions - ICU admissions|3 days later - hospital deaths|6 days later) |
    #                          shift forward by {hospital duration if no ICU no death + hospital mild moderate duration after discharge}
    hospital.data['hospital_risk_num'] = (hospital.data.hospital_inc -
                                          hospital.data.icu_inc -
                                          hospital.data.hospital_deaths)
    hospital.data.date = hospital.data.date + pd.to_timedelta(
        (roots['defaults']['hsp_no_icu_no_death_duration'] +
         roots['defaults']['hsp_midmod_after_discharge_duration']),
        unit='D')

    # Calculate the incidence of each symptom and overlap, regardless of co-occurrence of additional symptoms (not mutually exclusive)
    # severe long-term incidence = severe at risk number * proportion of severe survivors with each long-term symptom cluster
    hospital.data['hospital_cog_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'cognitive') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])
    hospital.data['hospital_fat_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'fatigue') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])
    hospital.data['hospital_resp_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'respiratory') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])
    hospital.data['hospital_cog_fat_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'cognitive_fatigue') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])
    hospital.data['hospital_cog_resp_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'cognitive_respiratory') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])
    hospital.data['hospital_fat_resp_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'fatigue_respiratory') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])
    hospital.data['hospital_cog_fat_resp_inc'] = (
        hospital.data.hospital_risk_num *
        dp.loc[(dp.outcome == 'cognitive_fatigue_respiratory') &
               (dp.population == 'hospital'), 'proportion_mean'].values[0])

    # Creating mutually exclusive categories of symptoms
    # cog_inc = cog_inc - (cog_fat_inc - cog_fat_resp_inc) - (cog_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    hospital.data.hospital_cog_inc = (
        hospital.data.hospital_cog_inc -
        (hospital.data.hospital_cog_fat_inc -
         hospital.data.hospital_cog_fat_resp_inc) -
        (hospital.data.hospital_cog_resp_inc -
         hospital.data.hospital_cog_fat_resp_inc) -
        hospital.data.hospital_cog_fat_resp_inc)

    # fat_inc = fat_inc - (cog_fat_inc - cog_fat_resp_inc) -  (fat_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    hospital.data.hospital_fat_inc = (
        hospital.data.hospital_fat_inc -
        (hospital.data.hospital_cog_fat_inc -
         hospital.data.hospital_cog_fat_resp_inc) -
        (hospital.data.hospital_fat_resp_inc -
         hospital.data.hospital_cog_fat_resp_inc) -
        hospital.data.hospital_cog_fat_resp_inc)

    # resp_inc = resp_inc - (fat_resp_inc - cog_fat_resp_inc) - (cog_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    hospital.data.hospital_resp_inc = (
        hospital.data.hospital_resp_inc -
        (hospital.data.hospital_fat_resp_inc -
         hospital.data.hospital_cog_fat_resp_inc) -
        (hospital.data.hospital_cog_resp_inc -
         hospital.data.hospital_cog_fat_resp_inc) -
        hospital.data.hospital_cog_fat_resp_inc)

    # cog_fat_inc = cog_fat_inc - cog_fat_resp_inc
    hospital.data.hospital_cog_fat_inc = (
        hospital.data.hospital_cog_fat_inc -
        hospital.data.hospital_cog_fat_resp_inc)

    # cog_resp_inc = cog_resp_inc - cog_fat_resp_inc
    hospital.data.hospital_cog_resp_inc = (
        hospital.data.hospital_cog_resp_inc -
        hospital.data.hospital_cog_fat_resp_inc)

    # fat_resp_inc = fat_resp_inc - cog_fat_resp_inc
    hospital.data.hospital_fat_resp_inc = (
        hospital.data.hospital_fat_resp_inc -
        hospital.data.hospital_cog_fat_resp_inc)

    # severe long-term prevalence = severe long-term incidence * [duration]
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='cognitive',
                              calc_col_stub='hospital_cog_')
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='fatigue',
                              calc_col_stub='hospital_fat_')
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='respiratory',
                              calc_col_stub='hospital_resp_')
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='cognitive_fatigue',
                              calc_col_stub='hospital_cog_fat_')
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='cognitive_respiratory',
                              calc_col_stub='hospital_cog_resp_')
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='fatigue_respiratory',
                              calc_col_stub='hospital_fat_resp_')
    hospital.data = calc_prev(df=hospital.data,
                              dp=dp,
                              dst_population='hospital',
                              dst_outcome='cognitive_fatigue_respiratory',
                              calc_col_stub='hospital_cog_fat_resp_')

    # Remove unneeded cols
    hospital.data = hospital.data.drop(columns=[
        'hospital_inc', 'icu_inc', 'hospital_deaths', 'hospital_risk_num'
    ])

    # endregion ----------------------------------------------------------------

    print('Calculating critical incidence and prevalence...')
    ## Critical Incidence & Prevalence
    # region -------------------------------------------------------------------

    # Shift icu deaths
    lag_icu = copy.deepcopy(icu)
    lag_icu.data = lag_icu.data.drop(columns='icu_inc')
    lag_icu.data.date = lag_icu.data.date + pd.to_timedelta(
        roots['defaults']['icu_to_death_duration'], unit='D')

    # Merge icu and lag_icu
    icu.data = pd.merge(
        icu.data.drop(columns='icu_deaths'),
        lag_icu.data,
        how='left',
        on=['location_id', 'age_group_id', 'sex_id', 'draw_var', 'date'])
    del lag_icu

    # critical at risk number = (ICU admissions - ICU deaths|3 days later) |
    #                            shift forward by {ICU duration if no death + ICU mild moderate duration after discharge}
    icu.data['icu_risk_num'] = icu.data.icu_inc - icu.data.icu_deaths
    icu.data.date = icu.data.date - pd.to_timedelta(
        (roots['defaults']['icu_no_death_duration'] +
         roots['defaults']['icu_midmod_after_discharge_duration']),
        unit='D')

    # Calculate the incidence of each symptom and overlap, regardless of co-occurrence of additional symptoms (not mutually exclusive)
    # critical long-term incidence = critical number at risk * proportion of critical with each long-term symptom cluster
    icu.data['icu_cog_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'cognitive') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])
    icu.data['icu_fat_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'fatigue') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])
    icu.data['icu_resp_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'respiratory') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])
    icu.data['icu_cog_fat_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'cognitive_fatigue') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])
    icu.data['icu_cog_resp_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'cognitive_respiratory') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])
    icu.data['icu_fat_resp_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'fatigue_respiratory') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])
    icu.data['icu_cog_fat_resp_inc'] = (
        icu.data.icu_risk_num *
        dp.loc[(dp.outcome == 'cognitive_fatigue_respiratory') &
               (dp.population == 'icu'), 'proportion_mean'].values[0])

    # Creating mutually exclusive categories of symptoms
    # cog_inc = cog_inc - (cog_fat_inc - cog_fat_resp_inc) - (cog_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    icu.data.icu_cog_inc = (
        icu.data.icu_cog_inc -
        (icu.data.icu_cog_fat_inc - icu.data.icu_cog_fat_resp_inc) -
        (icu.data.icu_cog_resp_inc - icu.data.icu_cog_fat_resp_inc) -
        icu.data.icu_cog_fat_resp_inc)

    # fat_inc = fat_inc - (cog_fat_inc - cog_fat_resp_inc) -  (fat_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    icu.data.icu_fat_inc = (
        icu.data.icu_fat_inc -
        (icu.data.icu_cog_fat_inc - icu.data.icu_cog_fat_resp_inc) -
        (icu.data.icu_fat_resp_inc - icu.data.icu_cog_fat_resp_inc) -
        icu.data.icu_cog_fat_resp_inc)

    # resp_inc = resp_inc - (fat_resp_inc - cog_fat_resp_inc) - (cog_resp_inc - cog_fat_resp_inc) - cog_fat_resp_inc
    icu.data.icu_resp_inc = (
        icu.data.icu_resp_inc -
        (icu.data.icu_fat_resp_inc - icu.data.icu_cog_fat_resp_inc) -
        (icu.data.icu_cog_resp_inc - icu.data.icu_cog_fat_resp_inc) -
        icu.data.icu_cog_fat_resp_inc)

    # cog_fat_inc = cog_fat_inc - cog_fat_resp_inc
    icu.data.icu_cog_fat_inc = (icu.data.icu_cog_fat_inc -
                                icu.data.icu_cog_fat_resp_inc)

    # cog_resp_inc = cog_resp_inc - cog_fat_resp_inc
    icu.data.icu_cog_resp_inc = (icu.data.icu_cog_resp_inc -
                                 icu.data.icu_cog_fat_resp_inc)

    # fat_resp_inc = fat_resp_inc - cog_fat_resp_inc
    icu.data.icu_fat_resp_inc = (icu.data.icu_fat_resp_inc -
                                 icu.data.icu_cog_fat_resp_inc)

    # critical long-term prevalence = critical long-term incidence * [duration]
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='cognitive',
                         calc_col_stub='icu_cog_')
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='fatigue',
                         calc_col_stub='icu_fat_')
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='respiratory',
                         calc_col_stub='icu_resp_')
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='cognitive_fatigue',
                         calc_col_stub='icu_cog_fat_')
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='cognitive_respiratory',
                         calc_col_stub='icu_cog_resp_')
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='fatigue_respiratory',
                         calc_col_stub='icu_fat_resp_')
    icu.data = calc_prev(df=icu.data,
                         dp=dp,
                         dst_population='icu',
                         dst_outcome='cognitive_fatigue_respiratory',
                         calc_col_stub='icu_cog_fat_resp_')

    # Remove unneeded cols
    icu.data = icu.data.drop(columns=['icu_inc', 'icu_deaths', 'icu_risk_num'])
    del dp

    # endregion ----------------------------------------------------------------

    print('Aggregating severities...')
    ## Aggregate Severities
    # region -------------------------------------------------------------------

    # Start from the midmod frame and join on hospital and ICU results.
    # Each source object is deleted immediately after use to keep peak
    # memory down (these are draw-level daily frames).
    df = copy.deepcopy(midmod)
    del midmod
    # Outer merges keep rows present in ANY severity stream; severities that
    # lack a given row contribute NaN, which the later .sum(axis=1) calls
    # treat as 0 (pandas skips NaN by default).
    df.data = pd.merge(
        df.data,
        hospital.data,
        how='outer',
        on=['location_id', 'age_group_id', 'sex_id', 'draw_var', 'date'])
    del hospital
    df.data = pd.merge(
        df.data,
        icu.data,
        how='outer',
        on=['location_id', 'age_group_id', 'sex_id', 'draw_var', 'date'])
    del icu

    # Incidence: for each outcome, total incidence = midmod + hospital + ICU.
    # sum(axis=1) skips NaN (pandas default), so a severity missing for a row
    # counts as 0.  The per-severity source columns are dropped right after
    # each sum to bound memory use.
    df.data['cognitive_inc'] = df.data[[
        'midmod_cog_inc', 'hospital_cog_inc', 'icu_cog_inc'
    ]].sum(axis=1)
    df.data.drop(columns=['midmod_cog_inc', 'hospital_cog_inc', 'icu_cog_inc'],
                 inplace=True)
    df.data['fatigue_inc'] = df.data[[
        'midmod_fat_inc', 'hospital_fat_inc', 'icu_fat_inc'
    ]].sum(axis=1)
    df.data.drop(columns=['midmod_fat_inc', 'hospital_fat_inc', 'icu_fat_inc'],
                 inplace=True)
    df.data['respiratory_inc'] = df.data[[
        'midmod_resp_inc', 'hospital_resp_inc', 'icu_resp_inc'
    ]].sum(axis=1)
    df.data.drop(
        columns=['midmod_resp_inc', 'hospital_resp_inc', 'icu_resp_inc'],
        inplace=True)
    # Two-way overlap outcomes.
    df.data['cognitive_fatigue_inc'] = df.data[[
        'midmod_cog_fat_inc', 'hospital_cog_fat_inc', 'icu_cog_fat_inc'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_cog_fat_inc', 'hospital_cog_fat_inc', 'icu_cog_fat_inc'
    ],
                 inplace=True)
    df.data['cognitive_respiratory_inc'] = df.data[[
        'midmod_cog_resp_inc', 'hospital_cog_resp_inc', 'icu_cog_resp_inc'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_cog_resp_inc', 'hospital_cog_resp_inc', 'icu_cog_resp_inc'
    ],
                 inplace=True)
    df.data['fatigue_respiratory_inc'] = df.data[[
        'midmod_fat_resp_inc', 'hospital_fat_resp_inc', 'icu_fat_resp_inc'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_fat_resp_inc', 'hospital_fat_resp_inc', 'icu_fat_resp_inc'
    ],
                 inplace=True)
    # Three-way overlap outcome.
    df.data['cognitive_fatigue_respiratory_inc'] = df.data[[
        'midmod_cog_fat_resp_inc', 'hospital_cog_fat_resp_inc',
        'icu_cog_fat_resp_inc'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_cog_fat_resp_inc', 'hospital_cog_fat_resp_inc',
        'icu_cog_fat_resp_inc'
    ],
                 inplace=True)

    # Prevalence: same pattern as incidence above — total prevalence per
    # outcome = midmod + hospital + ICU, with NaN treated as 0 by sum(axis=1)
    # and the per-severity columns dropped immediately afterwards.
    df.data['cognitive_prev'] = df.data[[
        'midmod_cog_prev', 'hospital_cog_prev', 'icu_cog_prev'
    ]].sum(axis=1)
    df.data.drop(
        columns=['midmod_cog_prev', 'hospital_cog_prev', 'icu_cog_prev'],
        inplace=True)
    df.data['fatigue_prev'] = df.data[[
        'midmod_fat_prev', 'hospital_fat_prev', 'icu_fat_prev'
    ]].sum(axis=1)
    df.data.drop(
        columns=['midmod_fat_prev', 'hospital_fat_prev', 'icu_fat_prev'],
        inplace=True)
    df.data['respiratory_prev'] = df.data[[
        'midmod_resp_prev', 'hospital_resp_prev', 'icu_resp_prev'
    ]].sum(axis=1)
    df.data.drop(
        columns=['midmod_resp_prev', 'hospital_resp_prev', 'icu_resp_prev'],
        inplace=True)
    # Two-way overlap outcomes.
    df.data['cognitive_fatigue_prev'] = df.data[[
        'midmod_cog_fat_prev', 'hospital_cog_fat_prev', 'icu_cog_fat_prev'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_cog_fat_prev', 'hospital_cog_fat_prev', 'icu_cog_fat_prev'
    ],
                 inplace=True)
    df.data['cognitive_respiratory_prev'] = df.data[[
        'midmod_cog_resp_prev', 'hospital_cog_resp_prev', 'icu_cog_resp_prev'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_cog_resp_prev', 'hospital_cog_resp_prev', 'icu_cog_resp_prev'
    ],
                 inplace=True)
    df.data['fatigue_respiratory_prev'] = df.data[[
        'midmod_fat_resp_prev', 'hospital_fat_resp_prev', 'icu_fat_resp_prev'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_fat_resp_prev', 'hospital_fat_resp_prev', 'icu_fat_resp_prev'
    ],
                 inplace=True)
    # Three-way overlap outcome.
    df.data['cognitive_fatigue_respiratory_prev'] = df.data[[
        'midmod_cog_fat_resp_prev', 'hospital_cog_fat_resp_prev',
        'icu_cog_fat_resp_prev'
    ]].sum(axis=1)
    df.data.drop(columns=[
        'midmod_cog_fat_resp_prev', 'hospital_cog_fat_resp_prev',
        'icu_cog_fat_resp_prev'
    ],
                 inplace=True)

    # endregion ----------------------------------------------------------------

    print('Aggregating by year...')
    ## Aggregate by year
    # region -------------------------------------------------------------------

    # Subset to calendar year 2020 (inclusive on both ends).
    df.data = df.data[(df.data.date >= datetime.datetime(2020, 1, 1))
                      & (df.data.date <= datetime.datetime(2020, 12, 31))]

    # Collapse the daily rows: sum each measure across all days of the year
    # within location/age/sex/draw.
    df.collapse(
        agg_function='sum',
        group_cols=['location_id', 'age_group_id', 'sex_id', 'draw_var'],
        calc_cols=[
            'cognitive_inc', 'cognitive_prev', 'fatigue_inc', 'fatigue_prev',
            'respiratory_inc', 'respiratory_prev', 'cognitive_fatigue_inc',
            'cognitive_fatigue_prev', 'cognitive_respiratory_inc',
            'cognitive_respiratory_prev', 'fatigue_respiratory_inc',
            'fatigue_respiratory_prev', 'cognitive_fatigue_respiratory_inc',
            'cognitive_fatigue_respiratory_prev'
        ])

    # Convert summed daily prevalence to an annual average by dividing by the
    # number of days in 2020 (366 — leap year).  Incidence stays as a sum.
    df.data.cognitive_prev = df.data.cognitive_prev / 366
    df.data.fatigue_prev = df.data.fatigue_prev / 366
    df.data.respiratory_prev = df.data.respiratory_prev / 366
    df.data.cognitive_fatigue_prev = df.data.cognitive_fatigue_prev / 366
    df.data.cognitive_respiratory_prev = df.data.cognitive_respiratory_prev / 366
    df.data.fatigue_respiratory_prev = df.data.fatigue_respiratory_prev / 366
    df.data.cognitive_fatigue_respiratory_prev = df.data.cognitive_fatigue_respiratory_prev / 366

    # Ensure incidence and prevalence aren't negative (project-level sanity
    # check; behavior on failure defined by DfHolder.check_neg).
    df.check_neg(calc_cols=[
        'cognitive_inc', 'cognitive_prev', 'fatigue_inc', 'fatigue_prev',
        'respiratory_inc', 'respiratory_prev', 'cognitive_fatigue_inc',
        'cognitive_fatigue_prev', 'cognitive_respiratory_inc',
        'cognitive_respiratory_prev', 'fatigue_respiratory_inc',
        'fatigue_respiratory_prev', 'cognitive_fatigue_respiratory_inc',
        'cognitive_fatigue_respiratory_prev'
    ])

    # endregion ----------------------------------------------------------------

    print('Calculating rates...')
    ## Calculate rates
    # region -------------------------------------------------------------------

    # Pull population estimates for the same location/age/sex strata from the
    # shared GBD database (parameters driven by the `roots` config dict).
    pop = get_population(age_group_id=roots['age_groups'],
                         single_year_age=False,
                         location_id=loc_id,
                         location_set_id=35,
                         year_id=roots['gbd_year'],
                         sex_id=[1, 2],
                         gbd_round_id=roots['gbd_round'],
                         status='best',
                         decomp_step=roots['decomp_step'])
    pop.drop(columns=['year_id', 'run_id'], inplace=True)

    # Left-merge population onto the outcome frame; note draw_var is not a
    # merge key, so every draw shares the same population value.
    df.data = pd.merge(df.data,
                       pop,
                       how='left',
                       on=['location_id', 'age_group_id', 'sex_id'])

    # Per-capita rates: counts divided by stratum population.  Rows with no
    # matching population get NaN rates — TODO confirm downstream handling.
    df.data['cognitive_inc_rate'] = df.data.cognitive_inc / df.data.population
    df.data['fatigue_inc_rate'] = df.data.fatigue_inc / df.data.population
    df.data[
        'respiratory_inc_rate'] = df.data.respiratory_inc / df.data.population
    df.data[
        'cognitive_fatigue_inc_rate'] = df.data.cognitive_fatigue_inc / df.data.population
    df.data[
        'cognitive_respiratory_inc_rate'] = df.data.cognitive_respiratory_inc / df.data.population
    df.data[
        'fatigue_respiratory_inc_rate'] = df.data.fatigue_respiratory_inc / df.data.population
    df.data[
        'cognitive_fatigue_respiratory_inc_rate'] = df.data.cognitive_fatigue_respiratory_inc / df.data.population

    df.data[
        'cognitive_prev_rate'] = df.data.cognitive_prev / df.data.population
    df.data['fatigue_prev_rate'] = df.data.fatigue_prev / df.data.population
    df.data[
        'respiratory_prev_rate'] = df.data.respiratory_prev / df.data.population
    df.data[
        'cognitive_fatigue_prev_rate'] = df.data.cognitive_fatigue_prev / df.data.population
    df.data[
        'cognitive_respiratory_prev_rate'] = df.data.cognitive_respiratory_prev / df.data.population
    df.data[
        'fatigue_respiratory_prev_rate'] = df.data.fatigue_respiratory_prev / df.data.population
    df.data[
        'cognitive_fatigue_respiratory_prev_rate'] = df.data.cognitive_fatigue_respiratory_prev / df.data.population
    # endregion ----------------------------------------------------------------

    print('Calculating YLDs...')
    ## Calculate YLDs
    # region -------------------------------------------------------------------

    # Read in disability weights
    dw = pd.read_csv('{}dws.csv'.format(roots['disability_weight']))

    # NOTE(review): `dw` is loaded but never used — YLDs below use a
    # hard-coded placeholder weight of 0.01 instead of the real disability
    # weights.  TODO: replace 0.01 with the outcome-specific weight from dw.
    df.data['cognitive_YLD'] = df.data.cognitive_prev_rate * 0.01
    df.data['fatigue_YLD'] = df.data.fatigue_prev_rate * 0.01
    df.data['respiratory_YLD'] = df.data.respiratory_prev_rate * 0.01
    df.data[
        'cognitive_fatigue_YLD'] = df.data.cognitive_fatigue_prev_rate * 0.01
    df.data[
        'cognitive_respiratory_YLD'] = df.data.cognitive_respiratory_prev_rate * 0.01
    df.data[
        'fatigue_respiratory_YLD'] = df.data.fatigue_respiratory_prev_rate * 0.01
    df.data[
        'cognitive_fatigue_respiratory_YLD'] = df.data.cognitive_fatigue_respiratory_prev_rate * 0.01

    del dw

    # endregion ----------------------------------------------------------------

    print('Saving datasets and running diagnostics...')
    ## Save datasets & run diagnostics
    # region -------------------------------------------------------------------

    # One stage_2 output file per outcome, each carrying the stratum keys,
    # counts, rates, and YLDs for that outcome only.

    # Cognitive
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var', 'cognitive_inc',
        'cognitive_prev', 'cognitive_inc_rate', 'cognitive_prev_rate',
        'cognitive_YLD'
    ],
                 filename='cognitive',
                 stage='stage_2')

    # Fatigue
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var', 'fatigue_inc',
        'fatigue_prev', 'fatigue_inc_rate', 'fatigue_prev_rate', 'fatigue_YLD'
    ],
                 filename='fatigue',
                 stage='stage_2')

    # Respiratory
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var', 'respiratory_inc',
        'respiratory_prev', 'respiratory_inc_rate', 'respiratory_prev_rate',
        'respiratory_YLD'
    ],
                 filename='respiratory',
                 stage='stage_2')

    # Cognitive Fatigue
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var',
        'cognitive_fatigue_inc', 'cognitive_fatigue_prev',
        'cognitive_fatigue_inc_rate', 'cognitive_fatigue_prev_rate',
        'cognitive_fatigue_YLD'
    ],
                 filename='cognitive_fatigue',
                 stage='stage_2')

    # Cognitive Respiratory
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var',
        'cognitive_respiratory_inc', 'cognitive_respiratory_prev',
        'cognitive_respiratory_inc_rate', 'cognitive_respiratory_prev_rate',
        'cognitive_respiratory_YLD'
    ],
                 filename='cognitive_respiratory',
                 stage='stage_2')

    # Fatigue Respiratory
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var',
        'fatigue_respiratory_inc', 'fatigue_respiratory_prev',
        'fatigue_respiratory_inc_rate', 'fatigue_respiratory_prev_rate',
        'fatigue_respiratory_YLD'
    ],
                 filename='fatigue_respiratory',
                 stage='stage_2')

    # Cognitive Fatigue Respiratory
    df.save_data(output_cols=[
        'location_id', 'age_group_id', 'sex_id', 'draw_var',
        'cognitive_fatigue_respiratory_inc',
        'cognitive_fatigue_respiratory_prev',
        'cognitive_fatigue_respiratory_inc_rate',
        'cognitive_fatigue_respiratory_prev_rate',
        'cognitive_fatigue_respiratory_YLD'
    ],
                 filename='cognitive_fatigue_respiratory',
                 stage='stage_2')
from classes.Driver import Driver
from classes.State import ACTIONS
from classes.Dataset import Dataset
from classes.LoadCar import load_car
from classes.KBhit import KBHit
import logging
from time import sleep, time
from Queue import Queue
from threading import Thread
# NOTE: Python 2 script (print statement, `Queue` module import above).
# Module logger named after the script file.
logger = logging.getLogger("play_dataset.py")

print "ACTIONS = " + str(ACTIONS)

# Hardware/config setup: car parameters come from the shared JSON config.
car, rps_ms, port = load_car("../config.json")
driver = Driver(car, show_camera=True)
dataset = Dataset()
# Hand-off queue of (state0, action, state1) transitions consumed by the
# save_dataset worker thread below.
q = Queue()


def save_dataset():
    """Worker loop: persist driving transitions pulled off the queue.

    Blocks on ``q.get()`` for each ``(state0, action, state1)`` tuple,
    converts it to a data vector via the driver, and appends it to the
    dataset.  Runs forever; intended as the target of a background Thread.
    """
    # No `global` needed: q, driver, and dataset are only read, never rebound.
    while True:
        state0, action, state1 = q.get()
        datavector, datavector_title = driver.process_dataset_vector(
            state0, action, state1)
        dataset.save_data(datavector, datavector_title)

        # Use the module logger so the message carries the "play_dataset.py"
        # name, instead of going to the anonymous root logger.
        logger.debug("Dataset saved")


t = Thread(target=save_dataset)