Example 1
    # Build in some tolerance for old models trained with former APIs missing the is_convolutional and is_recurrent
    # attributes. This may not always work!
    if not hasattr(dlwp, 'is_recurrent'):
        dlwp.is_recurrent = False
        for layer in dlwp.model.layers:
            if 'LSTM' in layer.name.upper() or 'LST_M' in layer.name.upper():
                dlwp.is_recurrent = True
    if not hasattr(dlwp, 'is_convolutional'):
        dlwp.is_convolutional = False
        for layer in dlwp.model.layers:
            if 'CONV' in layer.name.upper():
                dlwp.is_convolutional = True

    # Create data generator
    val_generator = SeriesDataGenerator(dlwp, validation_data, add_insolation=add_insolation[m],
                                        input_sel=input_selection[m], output_sel=output_selection[m],
                                        input_time_steps=input_time_steps[m], output_time_steps=output_time_steps[m],
                                        batch_size=64)

    # Create TimeSeriesEstimator
    estimator = TimeSeriesEstimator(dlwp, val_generator)

    # Very crude but for this test I want to exclude the predicted thickness from being added back
    if model_labels[m] == r'$\tau$ LSTM16':
        estimator._outputs_in_inputs = {'varlev': np.array(['HGT/500'])}

    # Make a time series prediction
    print('Predicting with model %s...' % model_labels[m])
    time_series = estimator.predict(num_forecast_steps, verbose=1)

    # Slice the arrays as we want
    time_series = time_series.sel(**selection, lat=((time_series.lat >= lat_min) & (time_series.lat <= lat_max)))
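As a minimal follow-up sketch (not part of the script above), the sliced forecast can be checked against matching truth fields with a quick RMSE; here 'verification' is a hypothetical xarray object assumed to share time/lat/lon coordinates with time_series.

# Hedged example only: 'verification' is a placeholder for verifying data on the
# same coordinates as the forecast; align the two objects, then compute RMSE
# over the spatial dimensions.
import numpy as np
import xarray as xr

fcst, verif = xr.align(time_series, verification, join='inner')
rmse = np.sqrt(((fcst - verif) ** 2).mean(dim=('lat', 'lon')))
print(rmse.values)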
Example 2
            dlwp.is_convolutional = True
if isinstance(dlwp, DLWPFunctional):
    if not hasattr(dlwp, '_n_steps'):
        dlwp._n_steps = 6
    if not hasattr(dlwp, 'time_dim'):
        dlwp.time_dim = 2
    sequence = dlwp._n_steps
else:
    sequence = None
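# Note: DLWPFunctional models get sequence=dlwp._n_steps, presumably so the
# generator yields that many consecutive output steps for the multi-step model;
# other models get sequence=None.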

# Create data generator
generator = SeriesDataGenerator(dlwp,
                                data,
                                batch_size=216,
                                input_sel=input_selection,
                                output_sel=output_selection,
                                input_time_steps=input_time_steps,
                                output_time_steps=output_time_steps,
                                add_insolation=add_insolation,
                                sequence=sequence)
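# Generate the full predictor/target arrays from the validation data; passing
# scale_and_impute=False presumably leaves them in raw (unscaled) units for
# later comparison with the forecast.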
p_val, t_val = generator.generate([], scale_and_impute=False)

# Create TimeSeriesEstimator
estimator = TimeSeriesEstimator(dlwp, generator)

# Make a time series prediction and convert the predictors for comparison
print('Predicting with model %s...' % model_label)
forecast = estimator.predict(num_plot_steps, verbose=1)
forecast = forecast.sel(**selection)

# Scale the forecast
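Purely as a hedged illustration of the step this comment introduces, standardized model output is commonly converted back to physical units with stored per-variable statistics; the 'mean' and 'std' variable names below are assumptions, not taken from this script.

# Hypothetical de-standardization: assumes the predictor dataset carries
# per-variable 'mean' and 'std' fields broadcastable against the forecast.
forecast = forecast * data['std'] + data['mean']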
Example 3
                            invert=True)
    validation_data = data.sel(sample=validation_set)
    train_data = data.sel(sample=train_set)

# For multiple GPUs, increase the batch size
batch_size = n_gpu * batch_size

# Build the data generators
if load_memory or use_keras_fit:
    print('Loading data to memory...')
generator = SeriesDataGenerator(dlwp,
                                train_data,
                                input_sel=input_selection,
                                output_sel=output_selection,
                                input_time_steps=input_time_steps,
                                output_time_steps=output_time_steps,
                                batch_size=batch_size,
                                add_insolation=add_solar,
                                load=load_memory,
                                shuffle=shuffle,
                                interval=step_interval)
if use_keras_fit:
    p_train, t_train = generator.generate([])
if validation_data is not None:
    val_generator = SeriesDataGenerator(dlwp,
                                        validation_data,
                                        input_sel=input_selection,
                                        output_sel=output_selection,
                                        input_time_steps=input_time_steps,
                                        output_time_steps=output_time_steps,
                                        batch_size=batch_size,
Example 4
        train_set = np.isin(data.sample.values,
                            np.array(validation_set, dtype='datetime64[ns]'),
                            assume_unique=True,
                            invert=True)
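    # (np.isin with invert=True, above, flags every sample NOT in the listed
    # validation dates, so the remaining samples form the training set.)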
    validation_data = data.sel(sample=validation_set)
    train_data = data.sel(sample=train_set)

# Build the data generators
if load_memory or use_keras_fit:
    print('Loading data to memory...')
generator = SeriesDataGenerator(dlwp,
                                train_data,
                                input_sel=io_selection,
                                output_sel=io_selection,
                                input_time_steps=io_time_steps,
                                output_time_steps=io_time_steps,
                                sequence=integration_steps,
                                add_insolation=add_solar,
                                batch_size=batch_size,
                                load=load_memory,
                                shuffle=shuffle)
if use_keras_fit:
    p_train, t_train = generator.generate([])
if validation_data is not None:
    val_generator = SeriesDataGenerator(dlwp,
                                        validation_data,
                                        input_sel=io_selection,
                                        output_sel=io_selection,
                                        input_time_steps=io_time_steps,
                                        output_time_steps=io_time_steps,
                                        sequence=integration_steps,