Example No. 1
# Define the process system (the opening of this call is truncated in the source listing;
# the assignment and the System class name are assumed from the system.* calls below)
system = System(
                regularization=1, normalize=True)

# Add sensors
system.add_sensor(name='Temperature', measured=True, controlled=True, unit='C')
system.add_sensor(name='Concentration', measured=True, controlled=False, unit='g/µL')

# Activate phenomena
system.activate_phenomena(['nucleation', 'growth'])

# Create dataset and set up data shuffler
data = Data(case_id='Demo data')
data.load_from_pickle('demo_data')
time_series_pair = TimeSeriesPair(data=data, system=system)

# Split data into training, validation, and test pools
data.set_batch_pool(pool_batch_id=['Demo batch 0', 'Demo batch 1', 'Demo batch 2', 'Demo batch 3'], pool_type='Training')
data.set_batch_pool(pool_batch_id=['Demo batch 4', 'Demo batch 5', 'Demo batch 6', 'Demo batch 7', 'Demo batch 8', 'Demo batch 9'], pool_type='Validation')
data.set_batch_pool(pool_batch_id=['Demo batch 4'], pool_type='Test')
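# Note: 'Demo batch 4' is assigned to both the Validation and Test pools.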

# Set up hybrid training model
hybrid_model = HybridModel(system=system)

# Compile hybrid model
hybrid_model.training_model.compile(loss=hybrid_model.loss_model.loss, optimizer='Adam')
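# 'Adam' selects the optimizer by its Keras string identifier (default hyperparameters),
# assuming training_model exposes a Keras-style compile; the loss function is supplied
# by the hybrid model's own loss_model.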

# Generate shuffled training, validation, and test data
training_data = time_series_pair.shuffle(pool_type=['Training'], delta_t_critical=20*60)
validation_data = time_series_pair.shuffle(pool_type=['Validation'], delta_t_critical=20*60)
test_data = time_series_pair.shuffle(pool_type=['Test'], min_step=1, max_step=1)
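# Note: 20*60 evaluates to 1200, i.e. a 20-minute window if delta_t_critical is given in
# seconds (assumed); min_step=1, max_step=1 appears to restrict the test pairs to
# single-step transitions (assumed).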

# Create training data