# NOTE(review): chunk starts mid-file — the opening of the `config` dict
# (keys before 'fit_modes') and the imports are outside this view. Original
# line breaks/indentation were lost; formatting below is reconstructed —
# verify against the original file.
'1200rpm',
'variable_rpm',
],
'fit_modes': [
    'per_feature',
    'per_sample',
],
}

# One list of candidate values per sweep axis; presumably the experiment runs
# the Cartesian product of these — confirm against Experiment's multi-run API.
run_configs = {
    'data_set': config['data_sets'],
    'model_function': config['model_functions'],
    'scaling': config['scalings'],
    'fit_mode': config['fit_modes'],
}

experiment = Experiment()
experiment.log('config/config', config)  # snapshot of the full config
experiment.log('config/run_configs', run_configs)  # snapshot of the sweep axes


def run_callable(run_config: dict):
    """Body executed for one sweep combination ``run_config``.

    NOTE(review): the function appears to continue past the end of this
    chunk; only the two local helpers are visible here.
    """
    def modifier(x):
        # Keep only rows with a positive rpm value (x is presumably a pandas
        # DataFrame with an `rpm` column — confirm against the caller).
        return x[x.rpm > 0]

    def pre_processing(data_frame):
        # Optionally min-max scale the raw values first; fit_mode presumably
        # selects whether the scaler is fit per feature or per sample (see
        # the 'per_feature'/'per_sample' values in config['fit_modes']).
        if run_config['scaling'] == 'min_max':
            samples = Scaler(MinMaxScaler, fit_mode=run_config['fit_mode']).fit_transform(data_frame.to_numpy())
        else:
            samples = data_frame.to_numpy()

        # build_samples presumably windows the array into fixed-length, 3-D
        # model inputs — TODO confirm against its definition.
        return build_samples(samples, target_sample_length=config['input_size'], target_dimensions=3)
'800rpm_gradual', # '1200rpm', # 'variable_rpm' ], 'fit_modes': [ 'per_feature', # 'per_sample' ], } run_configs = { 'data_set': config['data_sets'], 'fit_mode': config['fit_modes'], 'scaling': config['scalings'], } experiment = Experiment(auto_datetime_directory=False) experiment.log('config/config', config) experiment.log('config/run_configs', run_configs) model = create_feed_forward_autoencoder( input_dimension=config['input_size'], encoding_dimension=config['encoding_size'], hidden_layer_activations=config['hidden_layer_activations'], output_layer_activation=config['output_layer_activation'], loss=config['loss'], ) def run_callable(run_config: dict): experiment.print('Loading data') bearing_dataset = load_data(run_config['data_set'])
# NOTE(review): chunk starts mid-file — the opening of the `config` dict
# (including whichever list 'a2e.evaluation.reconstruction_error_cost'
# belongs to) and the imports are outside this view. Original line
# breaks/indentation were lost; formatting below — including the nesting
# under the main guard — is reconstructed; verify against the original file.
'a2e.evaluation.reconstruction_error_cost',
],
}

# Hyperparameter search space: dropout-rate bounds for the input, hidden and
# output layers (create_config_space is a project helper — not visible here).
config_space = create_config_space(
    min_dropout_rate_input=.2,
    max_dropout_rate_input=.8,
    min_dropout_rate_hidden_layers=.2,
    max_dropout_rate_hidden_layers=.8,
    min_dropout_rate_output=.2,
    max_dropout_rate_output=.8,
)

if __name__ == '__main__':
    experiment = Experiment(auto_datetime_directory=True)
    experiment.log('config/config', config)
    # run_configs is presumably defined earlier in this file — not visible
    # in this chunk.
    experiment.log('config/run_configs', run_configs)
    # Stringified before logging — presumably the config-space object is not
    # directly serializable by Experiment.log; confirm.
    experiment.log('config/config_space', str(config_space))

    def run_callable(run_config: dict):
        """Body executed for one sweep combination ``run_config``.

        NOTE(review): this chunk ends mid-expression inside the
        ``KerasModel(...)`` call below; the remainder is outside this view.
        """
        experiment.print('Loading data')
        bearing_dataset = load_data(run_config['data_set'])
        # Training portion of the dataset, returned as numpy (as_numpy=True),
        # restricted to the configured data column.
        x_train = bearing_dataset.train(column=config['data_column'], as_numpy=True)

        experiment.print('Initializing optimizer')
        experiment.print(f'max_iterations = {config["max_iterations"]}')
        optimizer = create_optimizer(
            run_config['optimizer'],
            config_space=config_space,
            model=KerasModel(