Example #1
0
 def plot_progress(self, ax_client):
     """Write optimization-progress plots for *ax_client* to an HTML report.

     Always includes the optimization trace; additionally renders either a
     contour plot (multiple range parameters) or a slice plot (single range
     parameter) for the primary eval metric. Plot construction errors of
     type TypeError are deliberately ignored so report generation is
     best-effort.
     """
     experiment = ax_client.experiment
     model = Models.GPEI(experiment=experiment,
                         data=experiment.fetch_data())
     metric = self.YR.args.eval_primary_metric
     html_elements = [plot_config_to_html(ax_client.get_optimization_trace())]
     params = get_range_parameters(model)
     try:
         # Contour plots need at least two range parameters; fall back
         # to a 1-D slice plot otherwise.
         if len(params) > 1:
             extra_plot = interact_contour(model=model, metric_name=metric)
         else:
             extra_plot = interact_slice(model=model,
                                         param_name=params[0].name,
                                         metric_name=metric)
         html_elements.append(plot_config_to_html(extra_plot))
     except TypeError:
         pass
     report_path = os.path.join(self.bayes_opt_root_experiment_folder,
                                "optimization_plots.html")
     with open(report_path, 'w') as f:
         f.write(render_report_elements(self.experiment_name,
                                        html_elements))
Example #2
0
    def testContours(self):
        """Smoke-test every contour-plot constructor and verify tooltips.

        Builds a BoTorch model over a Branin experiment (with a string
        choice parameter), constructs each plotly / AxPlotConfig contour
        variant, and checks that every parameter and metric name appears
        in the scatter-trace hover text.
        """
        exp = get_branin_experiment(with_str_choice_param=True,
                                    with_batch=True)
        exp.trials[0].run()
        model = Models.BOTORCH(
            # Model bridge kwargs
            experiment=exp,
            data=exp.fetch_data(),
        )
        # Assert that each type of plot can be constructed successfully
        plot = plot_contour_plotly(model, model.parameters[0],
                                   model.parameters[1],
                                   list(model.metric_names)[0])
        self.assertIsInstance(plot, go.Figure)
        plot = interact_contour_plotly(model, list(model.metric_names)[0])
        self.assertIsInstance(plot, go.Figure)
        plot = interact_contour(model, list(model.metric_names)[0])
        self.assertIsInstance(plot, AxPlotConfig)
        # Fixed: original had a duplicated `plot = plot =` assignment.
        plot = plot_contour(model, model.parameters[0],
                            model.parameters[1],
                            list(model.metric_names)[0])
        self.assertIsInstance(plot, AxPlotConfig)

        # Make sure all parameters and metrics are displayed in tooltips
        tooltips = list(exp.parameters.keys()) + list(exp.metrics.keys())
        for d in plot.data["data"]:
            # Only check scatter plots hoverovers
            if d["type"] != "scatter":
                continue
            for text in d["text"]:
                for tt in tooltips:
                    self.assertTrue(tt in text)
 def plot_progress(self, ax_client):
     """Write the optimization trace and a contour plot to an HTML report.

     The contour plot is best-effort: any failure while building it is
     swallowed so the trace report is still produced.
     """
     model = Models.GPEI(experiment=ax_client.experiment, data=ax_client.experiment.fetch_data())
     html_elements = []
     html_elements.append(plot_config_to_html(ax_client.get_optimization_trace()))
     try:
         html_elements.append(plot_config_to_html(interact_contour(model=model, metric_name=self.YR.args.eval_primary_metric)))
     except Exception:
         # Fixed: was a bare `except:`, which also swallowed
         # SystemExit/KeyboardInterrupt. Still best-effort, but only
         # for ordinary exceptions now.
         pass
     with open(os.path.join(self.bayes_opt_root_experiment_folder, "optimization_plots.html"), 'w') as f:
         f.write(render_report_elements(self.experiment_name, html_elements))
Example #4
0
 def testContours(self):
     """Smoke-test each contour-plot constructor on a Branin experiment.

     Verifies that the plotly variants return go.Figure and the Ax
     variants return AxPlotConfig.
     """
     exp = get_branin_experiment(with_batch=True)
     exp.trials[0].run()
     model = Models.BOTORCH(
         # Model bridge kwargs
         experiment=exp,
         data=exp.fetch_data(),
     )
     # Assert that each type of plot can be constructed successfully
     plot = plot_contour_plotly(model, model.parameters[0],
                                model.parameters[1],
                                list(model.metric_names)[0])
     self.assertIsInstance(plot, go.Figure)
     plot = interact_contour_plotly(model, list(model.metric_names)[0])
     self.assertIsInstance(plot, go.Figure)
     # Fixed: original had a duplicated `plot = plot =` assignment.
     plot = plot_contour(model, model.parameters[0],
                         model.parameters[1],
                         list(model.metric_names)[0])
     self.assertIsInstance(plot, AxPlotConfig)
     plot = interact_contour(model, list(model.metric_names)[0])
     self.assertIsInstance(plot, AxPlotConfig)
        # Save all experiment parameters
        df = exp.eval().df
        df.to_csv(os.path.join(output_dir, 'exp_eval.csv'), index=False)

        # Save best parameter
        best_arm_name = df.arm_name[df['mean'] == df['mean'].max()].values[0]
        exp_arm = {k: v.parameters for k, v in exp.arms_by_name.items()}
        exp_arm['best'] = best_arm_name
        print('Best arm:\n', str(exp.arms_by_name[best_arm_name]))
        with open(os.path.join(output_dir, 'exp_arm.json'), 'w') as f:
            json.dump(exp_arm, f)

        # Contour Plot
        os.makedirs(os.path.join(output_dir, 'contour_plot'))
        for metric in ['f1', 'precision', 'recall', 'accuracy']:
            contour_plot = interact_contour(model=gpei, metric_name=metric)
            plot(contour_plot.data,
                 filename=os.path.join(output_dir, 'contour_plot',
                                       '{}.html'.format(metric)))

        # Tradeoff Plot
        tradeoff_plot = plot_objective_vs_constraints(gpei, 'f1', rel=False)
        plot(tradeoff_plot.data,
             filename=os.path.join(output_dir, 'tradeoff_plot.html'))

        # Slice Plot
        # show the metric outcome as a function of one parameter while fixing the others
        os.makedirs(os.path.join(output_dir, 'slice_plot'))
        for param in ["lr", "decay", "warmups", "eps"]:
            slice_plot = plot_slice(gpei, param, "f1")
            plot(slice_plot.data,