def test_save_load_fitted_atomized_chain_correctly():
    """Fit a chain with nested atomized models, save it, reload it, and check
    that the reloaded chain matches structurally, serializes identically, and
    predicts at least as well after re-fitting."""
    chain = create_chain_with_several_nested_atomized_model()
    train_data, test_data = create_data_for_train()

    chain.fit(train_data)
    json_actual = chain.save('test_save_load_fitted_atomized_chain_correctly')

    # Reload from the file just written and re-serialize for comparison.
    json_path_load = create_correct_path('test_save_load_fitted_atomized_chain_correctly')
    chain_loaded = Chain()
    chain_loaded.load(json_path_load)
    json_expected = chain_loaded.save('test_save_load_fitted_atomized_chain_correctly_loaded')

    assert chain.length == chain_loaded.length
    assert json_actual == json_expected

    # The reloaded chain, once refit, should not be worse than the original.
    before_save_predicted = chain.predict(test_data)
    chain_loaded.fit(train_data)
    after_save_predicted = chain_loaded.predict(test_data)

    mse_before = mean_squared_error(y_true=test_data.target,
                                    y_pred=before_save_predicted.predict)
    mse_after = mean_squared_error(y_true=test_data.target,
                                   y_pred=after_save_predicted.predict)
    assert mse_after <= mse_before
def test_import_json_to_fitted_chain_correctly():
    """Loading a previously exported fitted chain and re-saving it must
    reproduce the original JSON file contents."""
    json_path_load = create_correct_path('test_fitted_chain_convert_to_json')

    restored_chain = Chain()
    restored_chain.load(json_path_load)
    saved_json = restored_chain.save('test_import_json_to_fitted_chain_correctly')

    with open(json_path_load, 'r') as source_file:
        expected_dict = json.load(source_file)

    # save() returns a JSON string; compare against the re-serialized file.
    assert saved_json == json.dumps(expected_dict)
def test_import_json_to_chain_correctly():
    """A chain loaded from JSON must serialize the same as a freshly built
    equivalent chain."""
    json_path_load = create_correct_path('test_chain_convert_to_json')

    loaded_chain = Chain()
    loaded_chain.load(json_path_load)
    actual_json = loaded_chain.save('test_import_json_to_chain_correctly_1')

    reference_chain = create_chain()
    expected_json = reference_chain.save('test_import_json_to_chain_correctly_2')

    # NOTE(review): both values appear to already be JSON strings, so the
    # extra json.dumps wrapping is likely redundant — kept for parity.
    assert json.dumps(actual_json) == json.dumps(expected_json)
def test_import_custom_json_object_to_chain_and_fit_correctly_no_exception():
    """Loading a hand-written JSON chain template, fitting it on scoring data,
    and saving it back must complete without raising."""
    current_dir = str(os.path.dirname(__file__))
    template_file = '../../data/test_custom_json_template.json'
    json_path_load = os.path.join(current_dir, template_file)

    train_file_path, test_file_path = get_scoring_case_data_paths()
    train_data = InputData.from_csv(train_file_path)

    chain = Chain()
    chain.load(json_path_load)
    chain.fit(train_data)
    chain.save('test_import_custom_json_object_to_chain_and_fit_correctly_no_exception')
def test_save_load_atomized_chain_correctly():
    """An unfitted chain with nested atomized models must survive a
    save/load round trip: same length, same serialized form."""
    chain = create_chain_with_several_nested_atomized_model()
    actual_json = chain.save('test_save_load_atomized_chain_correctly')

    json_path_load = create_correct_path('test_save_load_atomized_chain_correctly')
    with open(json_path_load, 'r') as source_file:
        expected_dict = json.load(source_file)

    restored_chain = Chain()
    restored_chain.load(json_path_load)

    assert chain.length == restored_chain.length
    assert actual_json == json.dumps(expected_dict)
def test_fitted_chain_cache_correctness_after_export_and_import():
    """A chain fitted, exported, and re-imported must still be fittable
    (its cache state must not break a subsequent fit)."""
    train_file_path, test_file_path = get_scoring_case_data_paths()
    train_data = InputData.from_csv(train_file_path)

    original_chain = Chain(PrimaryNode('logit'))
    original_chain.fit(train_data)
    original_chain.save('test_fitted_chain_cache_correctness_after_export_and_import')

    json_path_load = create_correct_path(
        'test_fitted_chain_cache_correctness_after_export_and_import')
    reloaded_chain = Chain()
    reloaded_chain.load(json_path_load)

    fit_result = reloaded_chain.fit(train_data)
    assert fit_result is not None
def run_import_export_example(chain_path):
    """Demonstrate exporting a fitted regression chain to JSON and importing
    it back, printing predictions before export and after import.

    :param chain_path: name/path under which the chain JSON is saved.
    """
    features_options = {'informative': 1, 'bias': 0.0}
    x_train, y_train, x_test, y_test = get_regression_dataset(
        features_options, samples_amount=100, features_amount=2)

    # Regression task definition shared by both data containers.
    task = Task(TaskTypesEnum.regression)

    train_input = InputData(idx=np.arange(0, len(x_train)),
                            features=x_train,
                            target=y_train,
                            task=task,
                            data_type=DataTypesEnum.table)
    # Target is unknown at prediction time.
    predict_input = InputData(idx=np.arange(0, len(x_test)),
                              features=x_test,
                              target=None,
                              task=task,
                              data_type=DataTypesEnum.table)

    # Build and fit the chain, then predict once before exporting.
    chain = get_chain()
    chain.fit_from_scratch(train_input)
    prediction_before_export = np.array(chain.predict(predict_input).predict)
    print(f'Before export {prediction_before_export[:4]}')

    # Export, then import into a fresh Chain object.
    chain.save(path=chain_path)
    json_path_load = create_correct_path(chain_path)
    new_chain = Chain()
    new_chain.load(json_path_load)

    prediction_after_export = np.array(new_chain.predict(predict_input).predict)
    print(f'After import {prediction_after_export[:4]}')
def __init__(self, node: Node = None, operation_id: int = None, nodes_from: list = None, path: str = None):
    """Initialize the template either from an existing graph node or from a
    serialized chain on disk.

    :param node: node to convert into this template (mutually optional with `path`).
    :param operation_id: identifier assigned to the operation within the template.
    :param nodes_from: list of parent node references for the converted node.
    :param path: path to a saved chain JSON; when given, the chain is loaded
        and wrapped as an atomized model/template.
    """
    # Need use the imports inside the class because of the problem of circular imports.
    from fedot.core.chains.chain import Chain
    from fedot.core.chains.chain_template import ChainTemplate
    from fedot.core.operations.atomized_model import AtomizedModel
    super().__init__()
    self.atomized_model_json_path = None
    self.next_chain_template = None
    self.chain_template = None
    if path:
        chain = Chain()
        chain.load(path)
        # NOTE(review): `next_chain_template` is assigned an AtomizedModel,
        # not a template — presumably intentional, but worth confirming
        # against the attribute's consumers.
        self.next_chain_template = AtomizedModel(chain)
        self.chain_template = ChainTemplate(chain)
    if node:
        self._operation_to_template(node, operation_id, nodes_from)