def test_one_operation_analyze_analyze():
    """Single-node hyperparameter SA returns its results as a dict."""
    # given
    pipeline, train_data, test_data, node_to_analyze, result_dir = given_data()
    requirements = SensitivityAnalysisRequirements(hyperparams_analysis_samples_size=1)

    # when
    analyzer = OneOperationHPAnalyze(pipeline=pipeline,
                                     train_data=train_data,
                                     test_data=test_data,
                                     requirements=requirements,
                                     path_to_save=result_dir)
    result = analyzer.analyze(node=node_to_analyze)

    # then
    assert type(result) is dict
def run_analysis(pipeline, train_data, test_data):
    """Run the full pipeline sensitivity analysis with plots and JSON export."""
    sa_requirements = SensitivityAnalysisRequirements(is_visualize=True,
                                                     is_save_results_to_json=True)
    approaches = [NodeDeletionAnalyze,
                  NodeReplaceOperationAnalyze,
                  MultiOperationsHPAnalyze]
    # Results land under the default FEDOT data dir, named after the analysis class.
    result_path = join(default_fedot_data_dir(), 'sensitivity',
                       f'{PipelineSensitivityAnalysis.__name__}')

    analysis = PipelineSensitivityAnalysis(pipeline=pipeline,
                                           train_data=train_data,
                                           test_data=test_data,
                                           approaches=approaches,
                                           requirements=sa_requirements,
                                           path_to_save=result_path)
    analysis.analyze()
def __init__(
        self,
        approaches: Optional[List[Type['NodeAnalyzeApproach']]] = None,
        approaches_requirements: SensitivityAnalysisRequirements = None,
        path_to_save=None,
        log: Log = None):
    """Configure node-level SA approaches, their requirements, output path and logger.

    Every argument is optional; each falls back to a sensible default:
    deletion + replacement approaches, fresh requirements, and the
    `<fedot_data_dir>/sensitivity/nodes_sensitivity` directory.
    """
    if approaches is None:
        self.approaches = [NodeDeletionAnalyze, NodeReplaceOperationAnalyze]
    else:
        self.approaches = approaches

    if path_to_save is None:
        self.path_to_save = join(default_fedot_data_dir(), 'sensitivity', 'nodes_sensitivity')
    else:
        self.path_to_save = path_to_save

    self.log = log if log is not None else default_log(__name__)

    if approaches_requirements is None:
        self.approaches_requirements = SensitivityAnalysisRequirements()
    else:
        self.approaches_requirements = approaches_requirements
def test_pipeline_analysis_analyze(analyze_method):
    """PipelineAnalysis.analyze returns a list and invokes the analyze method."""
    # given
    pipeline, train_data, test_data, node_index, result_dir = given_data()
    requirements = SensitivityAnalysisRequirements(hyperparams_analysis_samples_size=1)

    # when
    analysis = PipelineAnalysis(pipeline=pipeline,
                                train_data=train_data,
                                test_data=test_data,
                                requirements=requirements,
                                path_to_save=result_dir)
    non_structure_analyze_result = analysis.analyze()

    # then
    assert type(non_structure_analyze_result) is list
    assert analyze_method.called
def __init__(self, pipeline: Pipeline, train_data: InputData, test_data: InputData,
             requirements: SensitivityAnalysisRequirements = None,
             path_to_save=None, log: Log = None):
    """Store pipeline/data and resolve HP-analysis strategy from requirements.

    The analyze/sample callables are looked up by name from the
    `hp_analysis_meta` section of the (possibly defaulted) requirements.
    """
    self._pipeline = pipeline
    self._train_data = train_data
    self._test_data = test_data
    self.problem: Optional[Problem] = None

    if requirements is None:
        requirements = SensitivityAnalysisRequirements()
    self.requirements: HyperparamsAnalysisMetaParams = requirements.hp_analysis_meta
    # Resolve strategy callables by their configured names; .get() yields None for unknown names.
    self.analyze_method = analyze_method_by_name.get(self.requirements.analyze_method)
    self.sample_method = sample_method_by_name.get(self.requirements.sample_method)
    self.operation_types = None

    if path_to_save is None:
        self.path_to_save = join(default_fedot_data_dir(), 'sensitivity', 'pipeline_sensitivity')
    else:
        self.path_to_save = path_to_save
    self.log = log if log is not None else default_log(__name__)
def __init__(self, pipeline: Pipeline, train_data, test_data: InputData,
             requirements: SensitivityAnalysisRequirements = None,
             path_to_save=None, log: Log = None):
    """Hold pipeline/data for node analysis and ensure the output directory exists."""
    self._pipeline = pipeline
    self._train_data = train_data
    self._test_data = test_data
    self._origin_metric = None

    if requirements is None:
        requirements = SensitivityAnalysisRequirements()
    self._requirements = requirements

    if path_to_save is None:
        path_to_save = join(default_fedot_data_dir(), 'sensitivity', 'nodes_sensitivity')
    self._path_to_save = path_to_save

    self.log = log if log is not None else default_log(__name__)

    # Create the results directory up front so later saves cannot fail on a missing path.
    if not exists(self._path_to_save):
        makedirs(self._path_to_save)
def __init__(
        self,
        pipeline: Pipeline,
        train_data: InputData,
        test_data: InputData,
        approaches: Optional[List[Type[MultiOperationsHPAnalyze]]] = None,
        requirements: SensitivityAnalysisRequirements = None,
        path_to_save=None,
        log: Log = None):
    """Configure pipeline-level SA: approaches, requirements, output path, logger."""
    self.pipeline = pipeline
    self.train_data = train_data
    self.test_data = test_data

    if requirements is None:
        requirements = SensitivityAnalysisRequirements()
    self.requirements = requirements

    # Default to multi-operation hyperparameter analysis when no approaches given.
    self.approaches = [MultiOperationsHPAnalyze] if approaches is None else approaches

    if path_to_save is None:
        self.path_to_save = join(default_fedot_data_dir(), 'sensitivity', 'pipeline_sa')
    else:
        self.path_to_save = path_to_save
    self.log = log if log is not None else default_log(__name__)
def __init__(self, pipeline: Pipeline, train_data: InputData, test_data: InputData,
             approaches: Optional[List[Type[NodeAnalyzeApproach]]] = None,
             requirements: SensitivityAnalysisRequirements = None,
             path_to_save=None, log: Log = None,
             nodes_to_analyze: List[Node] = None):
    """Set up node-set analysis; with no explicit node list, analyze every pipeline node."""
    self.pipeline = pipeline
    self.train_data = train_data
    self.test_data = test_data
    self.approaches = approaches

    if requirements is None:
        requirements = SensitivityAnalysisRequirements()
    self.requirements = requirements
    self.metric = self.requirements.metric

    self.log = log if log is not None else default_log(__name__)

    if path_to_save is None:
        self.path_to_save = join(default_fedot_data_dir(), 'sensitivity', 'nodes_sensitivity')
    else:
        self.path_to_save = path_to_save

    if nodes_to_analyze:
        self.nodes_to_analyze = nodes_to_analyze
    else:
        # Fall back to the whole pipeline when the caller did not pick specific nodes.
        self.log.message('Nodes to analyze are not defined. All nodes will be analyzed.')
        self.nodes_to_analyze = self.pipeline.nodes
def __init__(self, pipeline: Pipeline, train_data, test_data: InputData,
             requirements: SensitivityAnalysisRequirements = None,
             path_to_save=None, log: Log = None):
    """Initialize single-operation HP analysis on top of the base node-analysis setup.

    Resolves the analyze/sample callables by name from the requirements'
    `hp_analysis_meta` section and prepares per-operation state holders.
    """
    # BUG FIX: `path_to_save` was previously passed positionally as the fourth
    # argument, which in the base __init__ signature is the `requirements`
    # parameter — the parent would receive the path string as its requirements
    # and never see the intended save path. Pass it by keyword instead.
    super().__init__(pipeline, train_data, test_data, path_to_save=path_to_save)

    requirements = SensitivityAnalysisRequirements() if requirements is None else requirements
    self.requirements: HyperparamsAnalysisMetaParams = requirements.hp_analysis_meta
    # .get() returns None for unrecognized method names — assumed checked downstream.
    self.analyze_method = analyze_method_by_name.get(self.requirements.analyze_method)
    self.sample_method = sample_method_by_name.get(self.requirements.sample_method)

    self.problem = None
    self.operation_type = None
    self.data_under_lock: dict = {}
    self.path_to_save = \
        join(default_fedot_data_dir(), 'sensitivity', 'nodes_sensitivity') \
        if path_to_save is None else path_to_save
    self.log = default_log(__name__) if log is None else log