Example 1
def run_nn_resume_single(locator, random_variables, target_parameters,
                         list_building_names, weather_path, gv):

    #   read the inputs and targets for the neural net
    urban_input_matrix, urban_taget_matrix = nn_input_collector(locator)
    #   read the saved model and the normalizers
    model, scalerT, scalerX = nn_model_collector(locator)
    #   resume training of the neural net
    neural_trainer_resume(urban_input_matrix, urban_taget_matrix, model,
                          scalerX, scalerT, locator)
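
The examples on this page rely on nn_model_collector to restore a previously trained model together with its normalizers. The CEA implementation is not shown here; below is a minimal sketch of what such a loader could look like, assuming a Keras model saved to HDF5 and scalers persisted with joblib (the file names and the model_dir parameter are hypothetical; the real function resolves paths through the CEA locator):

import os
import joblib
from keras.models import load_model

def nn_model_collector_sketch(model_dir):
    # hypothetical file names; the real nn_model_collector resolves them
    # through the CEA locator
    model = load_model(os.path.join(model_dir, "nn_model.h5"))
    scalerX = joblib.load(os.path.join(model_dir, "scalerX.save"))  # input normalizer
    scalerT = joblib.load(os.path.join(model_dir, "scalerT.save"))  # target normalizer
    return model, scalerT, scalerX
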
Example 2
def run_nn_continue(locator, autoencoder):
    '''
    this function continues a pipeline of tasks by collecting pre-sampled data and resuming the neural network training
    :param locator: points to the variables
    :param autoencoder: flag indicating whether an autoencoder is used in the neural net (can be accessed from 'nn_settings.py')
    :return: -
    '''

    for k in range(number_sweeps):
        collect_count = 0
        while collect_count < number_samples_scaler:
            #   a different seed (for random generation) could be fixed in each loop; it is left disabled here
            # np.random.seed(collect_count)
            #   read the n random files from the previous step and create the inputs and targets for the neural net
            urban_input_matrix, urban_taget_matrix, collect_count = presampled_collector(
                locator, collect_count)
            #   read the saved model and the normalizers
            model, scalerT, scalerX = nn_model_collector(locator)
            #   resume training of the neural net
            neural_trainer_resume(urban_input_matrix, urban_taget_matrix,
                                  model, scalerX, scalerT, locator,
                                  autoencoder)
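
presampled_collector is not shown above; the loop only assumes that it returns one batch of inputs and targets plus an updated counter, so repeated calls walk through all pre-generated sample files. A minimal sketch of that contract, with hypothetical .npz sample files and a hypothetical batch size:

import numpy as np

def presampled_collector_sketch(sample_files, collect_count, batch_size=10):
    # hypothetical batching: read the next batch_size pre-generated samples
    batch = sample_files[collect_count:collect_count + batch_size]
    inputs = np.vstack([np.load(f)["inputs"] for f in batch])
    targets = np.vstack([np.load(f)["targets"] for f in batch])
    # return the advanced counter so the caller's while loop can terminate
    return inputs, targets, collect_count + batch_size
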
Example 3
def eval_nn_performance(locator, random_variables, target_parameters,
                        list_building_names, config, nn_delay,
                        climatic_variables, region, year, use_daysim_radiation,
                        use_stochastic_occupancy):
    urban_input_matrix, urban_taget_matrix = sampling_single(
        locator, random_variables, target_parameters, list_building_names,
        config, nn_delay, climatic_variables, region, year,
        use_daysim_radiation, use_stochastic_occupancy)
    model, scalerT, scalerX = nn_model_collector(locator)
    get_nn_performance(model, scalerT, scalerX, urban_input_matrix,
                       urban_taget_matrix, locator)
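
get_nn_performance is likewise not shown here; the following is a minimal sketch of the kind of evaluation it could perform, assuming scikit-learn metrics (the metric choice is an assumption, not necessarily the CEA implementation):

from sklearn.metrics import mean_squared_error, r2_score

def get_nn_performance_sketch(model, scalerT, scalerX, inputs, targets):
    # predict in normalized space, then map back to physical units
    predictions = scalerT.inverse_transform(model.predict(scalerX.transform(inputs)))
    print("MSE: %f" % mean_squared_error(targets, predictions))
    print("R2: %f" % r2_score(targets, predictions))
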
Example 4
def ss_initial_sample_loader(number_samples_scaler, locator,
                             list_building_names):
    building_names_with_measurement, all_measurements_matrix = ss_measurment_loader(
        locator)
    # hard-coded indices of the buildings for which measurements are available
    building_numbers_with_measurement = [
        2, 11, 82, 83, 84, 85, 86, 87, 88, 89, 90, 96, 97, 98, 99, 100, 101,
        115, 116, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 138,
        198, 199, 200, 201, 202, 203, 207, 211, 221
    ]
    scaler_inout_path = locator.get_minmaxscaler_folder()
    model, scalerT, scalerX = nn_model_collector(locator)

    file_path_inputs = os.path.join(scaler_inout_path, "input0.csv")
    urban_input_matrix = np.asarray(pd.read_csv(file_path_inputs, header=None))

    # reshape the flat input file into a tensor of (buildings, time, features)
    num_buildings = len(list_building_names)
    num_features = len(urban_input_matrix[0])
    # target_parameters and warmup_period are read from 'nn_settings.py'
    num_outputs = len(target_parameters)
    matrix = np.empty([num_buildings, 8759 + warmup_period, num_outputs])
    reshaped_input_matrix = urban_input_matrix.reshape(num_buildings, 8759,
                                                       num_features)
    # prepend the last hours of the year as a warm-up period
    warmup_period_input_matrix = reshaped_input_matrix[:, (8759 -
                                                           warmup_period):, :]
    concat_input_matrix = np.hstack(
        (warmup_period_input_matrix, reshaped_input_matrix))

    for i in range(8759 + warmup_period):
        one_hour_step = concat_input_matrix[:, i, :]
        if i < 1:
            # at the first hour there are no previous estimates, so the
            # feedback columns (36:41) are seeded with zeros
            first_hour_step = np.zeros([num_buildings, num_outputs])
            one_hour_step[:, 36:41] = first_hour_step
            inputs_x = scalerX.transform(one_hour_step)
            model_estimates = model.predict(inputs_x)
            matrix[:, i, :] = scalerT.inverse_transform(model_estimates)
        else:
            # feed the previous hour's estimates back into columns 36:41
            other_hour_step = matrix[:, i - 1, :]
            one_hour_step[:, 36:41] = other_hour_step
            inputs_x = scalerX.transform(one_hour_step)
            model_estimates = model.predict(inputs_x)
            matrix[:, i, :] = scalerT.inverse_transform(model_estimates)
    # drop the warm-up period from the predictions
    vector = matrix[:, warmup_period - 1:, :]

    all_predictions_matrix = vector[building_numbers_with_measurement, :, 0].T
    error = all_measurements_matrix - all_predictions_matrix
    mbe_initial_sample = np.median(error, axis=0)
    cvrmse_initial_sample = np.divide(np.sqrt(np.mean(error * error, axis=1)),
                                      all_measurements_matrix)
    return mbe_initial_sample, cvrmse_initial_sample
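
For reference, a worked example of the two calibration metrics on dummy data. Note that the textbook CV(RMSE) normalizes the RMSE by the mean of the measurements, whereas the snippet above divides elementwise by the measurements matrix:

import numpy as np

measured = np.array([[10.0, 20.0], [12.0, 18.0], [11.0, 22.0]])  # hours x buildings
predicted = np.array([[9.0, 21.0], [13.0, 17.0], [10.0, 23.0]])

error = measured - predicted
mbe = np.median(error, axis=0)               # one bias value per building
rmse = np.sqrt(np.mean(error ** 2, axis=0))  # one RMSE per building
cvrmse = rmse / np.mean(measured, axis=0)    # textbook CV(RMSE)
print(mbe, cvrmse)
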
Example 5
def run_nn_pipeline(locator, random_variables, target_parameters,
                    list_building_names, weather_path, gv, scalerX, scalerT,
                    multiprocessing, config, nn_delay, climatic_variables,
                    region, year, use_daysim_radiation):
    '''
    this function enables a pipeline of tasks by calling a random sampler and a neural network trainer
    :param locator: points to the variables
    :param random_variables: a list containing the names of variables associated with uncertainty (can be accessed from 'nn_settings.py')
    :param target_parameters: a list containing the name of desirable outputs (can be accessed from 'nn_settings.py')
    :param list_building_names: a list containing the name of desired buildings
    :param weather_path: weather path
    :param gv: global variables
    :param scalerX: normalizer for the inputs
    :param scalerT: normalizer for the targets
    :param multiprocessing: flag that enables multiprocessing during sampling
    :return: -
    '''
    #   create n random sample of the whole dataset of buildings. n is accessible from 'nn_settings.py'
    sampling_main(locator, random_variables, target_parameters,
                  list_building_names, weather_path, gv, multiprocessing,
                  config, nn_delay, climatic_variables, region, year,
                  use_daysim_radiation)
    #   read the n random files from the previous step and create the inputs and targets for the neural net
    urban_input_matrix, urban_taget_matrix = nn_input_collector(locator)
    #   train the neural net (autoencoder is read from 'nn_settings.py')
    neural_trainer(urban_input_matrix, urban_taget_matrix, locator, scalerX,
                   scalerT, autoencoder)
    #   do nn_passes additional training (nn_passes can be accessed from 'nn_settings.py')
    for i in range(nn_passes):
        #   fix a different seed number (for random generation) in each loop
        np.random.seed(i)
        #   create n random sample of the whole dataset of buildings. n is accessible from 'nn_settings.py'
        sampling_main(locator, random_variables, target_parameters,
                      list_building_names, weather_path, gv, multiprocessing,
                      config, nn_delay, climatic_variables, region, year,
                      use_daysim_radiation)
        #   read the n random files from the previous step and create the inputs and targets for the neural net
        urban_input_matrix, urban_taget_matrix = nn_input_collector(locator)
        #   read the saved model and the normalizers
        model, scalerT, scalerX = nn_model_collector(locator)
        #   resume training of the neural net
        neural_trainer_resume(urban_input_matrix, urban_taget_matrix, model,
                              scalerX, scalerT, locator, autoencoder)
        print("%d random sample passes of the city have been completed" % i)
Example 6
def input_prepare_estimate(list_building_names, locator, gv, climatic_variables, region, year,
                           use_daysim_radiation, use_stochastic_occupancy, weather_array, weather_data):
    '''
    this function prepares the inputs for the neural net and writes the model estimates for each building,
    splitting the data-preparation jobs between different processors
    :param list_building_names: a list of building names
    :param locator: points to the variables
    :param gv: global variables
    :return: - (the estimates are written to disk, one CSV file per building)
    '''

    building_properties, schedules_dict, date = properties_and_schedule(locator, region, year, use_daysim_radiation)
    #   open multiprocessing pool
    pool = mp.Pool()
    #   count the number of CPUs
    print("Using {cpu_count} CPUs".format(cpu_count=mp.cpu_count()))
    #   create an empty job list to be filled later
    joblist = []
    #   create one job for each data preparation task i.e. each building
    for building_name in list_building_names:
        job = pool.apply_async(input_estimate_prepare_multi_processing,
                               [building_name, gv, locator, climatic_variables, region, year, use_daysim_radiation,
                                use_stochastic_occupancy, weather_array, weather_data,
                                building_properties, schedules_dict, date])
        joblist.append(job)
    # run the input preparation for all buildings in the list (here called jobs)
    for i, job in enumerate(joblist):
        NN_input_ready = job.get(240)  # wait up to 240 seconds for each job
        #   remove buildings that have "NaN" in their input (e.g. if heating/cooling is off, the indoor temperature
        #   will be returned as "NaN"). Afterwards, stack the inputs/targets of all buildings
        check_nan = 1 * (np.isnan(np.sum(NN_input_ready)))
        if check_nan == 0:
            if i == 0:
                urban_input_matrix = NN_input_ready
            else:
                urban_input_matrix = np.concatenate((urban_input_matrix, NN_input_ready))

    # close the multiprocessing pool and wait for the workers to exit
    pool.close()
    pool.join()

    model, scalerT, scalerX = nn_model_collector(locator)

    # reshape the flat input matrix into a tensor of (buildings, time, features)
    num_buildings = len(list_building_names)
    num_features = len(urban_input_matrix[0])
    num_outputs = len(target_parameters)
    matrix = np.empty([num_buildings, 8759 + warmup_period, num_outputs])
    reshaped_input_matrix = urban_input_matrix.reshape(num_buildings, 8759, num_features)

    # prepend the last hours of the year as a warm-up period
    warmup_period_input_matrix = reshaped_input_matrix[:, (8759 - warmup_period):, :]
    concat_input_matrix = np.hstack((warmup_period_input_matrix, reshaped_input_matrix))

    for i in range(8759 + warmup_period):
        one_hour_step = concat_input_matrix[:, i, :]
        if i < 1:
            # at the first hour there are no previous estimates, so the
            # feedback columns (36:41) are seeded with zeros
            first_hour_step = np.zeros([num_buildings, num_outputs])
            one_hour_step[:, 36:41] = first_hour_step
            inputs_x = scalerX.transform(one_hour_step)
            model_estimates = model.predict(inputs_x)
            matrix[:, i, :] = scalerT.inverse_transform(model_estimates)
        else:
            # feed the previous hour's estimates back into columns 36:41
            other_hour_step = matrix[:, i - 1, :]
            one_hour_step[:, 36:41] = other_hour_step
            inputs_x = scalerX.transform(one_hour_step)
            model_estimates = model.predict(inputs_x)
            matrix[:, i, :] = scalerT.inverse_transform(model_estimates)


    # save the estimates per building, dropping the warm-up period
    for i, name in enumerate(list_building_names):
        vector = matrix[i][warmup_period - 1:, :].T
        dict_to_dataframe = dict(zip(target_parameters, vector))
        pd.DataFrame(dict_to_dataframe).to_csv(locator.get_result_building_NN(name), float_format='%.3f')

    print("done")

    return
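
The hour-by-hour loop above is an autoregressive rollout: the model's previous-hour outputs are written back into input columns 36:41 before predicting the next hour. A compact, self-contained restatement of that pattern (the feedback-column slice is taken from the snippets above; the function name and everything else is a sketch):

import numpy as np

def autoregressive_rollout(model, scalerX, scalerT, inputs, feedback_cols=slice(36, 41)):
    num_buildings, num_hours, _ = inputs.shape
    num_outputs = feedback_cols.stop - feedback_cols.start
    outputs = np.empty((num_buildings, num_hours, num_outputs))
    previous = np.zeros((num_buildings, num_outputs))  # cold start at hour 0
    for hour in range(num_hours):
        one_hour_step = inputs[:, hour, :].copy()
        one_hour_step[:, feedback_cols] = previous     # feed estimates back in
        scaled = model.predict(scalerX.transform(one_hour_step))
        previous = scalerT.inverse_transform(scaled)
        outputs[:, hour, :] = previous
    return outputs
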