def save_multiclass_classification(additional_metrics, model_desc, model_path, fold_cnt, repeat_cnt):
    """Render the multiclass-classification model report as ``README.md`` under *model_path*.

    Writes the model description, a metric-details table and the confusion
    matrix (both via pandas ``to_markdown``), then delegates to the
    ``AdditionalMetrics`` helpers for learning curves, tree/linear
    visualizations, permutation and SHAP importance, and finally appends a
    back-navigation link.

    Parameters
    ----------
    additional_metrics : dict
        Must contain "max_metrics" and "confusion_matrix" (pandas objects);
        may contain "additional_plots".
    model_desc : str
        Free-form markdown description written at the top of the report.
    model_path : str
        Directory in which ``README.md`` is created/overwritten.
    fold_cnt, repeat_cnt
        Forwarded unchanged to the ``AdditionalMetrics`` helpers.
    """
    metrics_table = additional_metrics["max_metrics"].transpose()
    cm = additional_metrics["confusion_matrix"]
    report_path = os.path.join(model_path, "README.md")
    with open(report_path, "w", encoding="utf-8") as fout:
        fout.write(model_desc)
        fout.write(f"\n### Metric details\n{metrics_table.to_markdown()}\n\n")
        fout.write(f"\n## Confusion matrix\n{cm.to_markdown()}")
        AdditionalMetrics.add_learning_curves(fout)
        AdditionalMetrics.add_tree_viz(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_linear_coefs(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_permutation_importance(
            fout, model_path, fold_cnt, repeat_cnt)
        # Extra plots are optional — appended only when the producer supplied them.
        plots = additional_metrics.get("additional_plots")
        if plots is not None:
            AdditionalPlots.append(fout, model_path, plots)
        AdditionalMetrics.add_shap_importance(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_shap_multiclass(fout, model_path, fold_cnt, repeat_cnt)
        fout.write("\n\n[<< Go back](../README.md)\n")
def save_binary_classification(additional_metrics, model_desc, model_path):
    """Write the binary-classification model report to ``README.md`` in *model_path*.

    Emits the model description, a metric-details table, the confusion matrix
    at the tuned decision threshold (rounded to 6 decimals), and the learning
    curves section.

    Parameters
    ----------
    additional_metrics : dict
        Must contain "max_metrics", "confusion_matrix" (pandas objects) and
        "threshold" (float).
    model_desc : str
        Markdown description written at the top of the report.
    model_path : str
        Directory in which ``README.md`` is created/overwritten.
    """
    max_metrics = additional_metrics["max_metrics"].transpose()
    confusion_matrix = additional_metrics["confusion_matrix"]
    threshold = additional_metrics["threshold"]
    # encoding="utf-8" makes the report deterministic across platforms and
    # matches the utf-8-enabled variant of save_multiclass_classification in
    # this file; without it, non-ASCII markdown fails on cp125x locales.
    with open(os.path.join(model_path, "README.md"), "w", encoding="utf-8") as fout:
        fout.write(model_desc)
        fout.write("\n## Metric details\n{}\n\n".format(
            max_metrics.to_markdown()))
        fout.write("\n## Confusion matrix (at threshold={})\n{}".format(
            np.round(threshold, 6), confusion_matrix.to_markdown()))
        AdditionalMetrics.add_learning_curves(fout)
def save_multiclass_classification(additional_metrics, model_desc, model_path):
    """Write the multiclass-classification model report to ``README.md`` in *model_path*.

    Emits the model description, a metric-details table, the confusion matrix
    (both via pandas ``to_markdown``) and the learning curves section.

    Parameters
    ----------
    additional_metrics : dict
        Must contain "max_metrics" and "confusion_matrix" (pandas objects).
    model_desc : str
        Markdown description written at the top of the report.
    model_path : str
        Directory in which ``README.md`` is created/overwritten.
    """
    max_metrics = additional_metrics["max_metrics"].transpose()
    confusion_matrix = additional_metrics["confusion_matrix"]
    # encoding="utf-8" for platform-independent output, consistent with the
    # utf-8-enabled variant of this function elsewhere in the file.
    with open(os.path.join(model_path, "README.md"), "w", encoding="utf-8") as fout:
        fout.write(model_desc)
        fout.write("\n### Metric details\n{}\n\n".format(
            max_metrics.to_markdown()))
        fout.write("\n## Confusion matrix\n{}".format(
            confusion_matrix.to_markdown()))
        AdditionalMetrics.add_learning_curves(fout)
def save_multiclass_classification(additional_metrics, model_desc, model_path, fold_cnt, repeat_cnt):
    """Write the multiclass-classification model report to ``README.md`` in *model_path*.

    Emits the model description, metric details and confusion matrix, then
    delegates to the ``AdditionalMetrics`` helpers for learning curves,
    tree/linear visualizations, permutation importance and SHAP sections.

    Parameters
    ----------
    additional_metrics : dict
        Must contain "max_metrics" and "confusion_matrix" (pandas objects).
    model_desc : str
        Markdown description written at the top of the report.
    model_path : str
        Directory in which ``README.md`` is created/overwritten.
    fold_cnt, repeat_cnt
        Forwarded unchanged to the ``AdditionalMetrics`` helpers.
    """
    max_metrics = additional_metrics["max_metrics"].transpose()
    confusion_matrix = additional_metrics["confusion_matrix"]
    # encoding="utf-8" for platform-independent output, consistent with the
    # utf-8-enabled variant of this function elsewhere in the file.
    with open(os.path.join(model_path, "README.md"), "w", encoding="utf-8") as fout:
        fout.write(model_desc)
        fout.write("\n### Metric details\n{}\n\n".format(
            max_metrics.to_markdown()))
        fout.write("\n## Confusion matrix\n{}".format(
            confusion_matrix.to_markdown()))
        AdditionalMetrics.add_learning_curves(fout)
        AdditionalMetrics.add_tree_viz(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_linear_coefs(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_permutation_importance(
            fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_shap_importance(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_shap_multiclass(fout, model_path, fold_cnt, repeat_cnt)
        # NOTE(review): unlike the sibling variants, this one does not append
        # the "[<< Go back]" navigation link — confirm whether that is intended.
def save_binary_classification(additional_metrics, model_desc, model_path, fold_cnt, repeat_cnt):
    """Write the binary-classification model report to ``README.md`` in *model_path*.

    Emits the model description, metric details, the confusion matrix at the
    tuned decision threshold (rounded to 6 decimals), then delegates to the
    ``AdditionalMetrics`` helpers for learning curves, tree/linear
    visualizations, permutation and SHAP importance, and finally appends a
    back-navigation link.

    Parameters
    ----------
    additional_metrics : dict
        Must contain "max_metrics", "confusion_matrix" (pandas objects) and
        "threshold" (float).
    model_desc : str
        Markdown description written at the top of the report.
    model_path : str
        Directory in which ``README.md`` is created/overwritten.
    fold_cnt, repeat_cnt
        Forwarded unchanged to the ``AdditionalMetrics`` helpers.
    """
    max_metrics = additional_metrics["max_metrics"].transpose()
    confusion_matrix = additional_metrics["confusion_matrix"]
    threshold = additional_metrics["threshold"]
    # encoding="utf-8" for platform-independent output, consistent with the
    # utf-8-enabled variant of save_multiclass_classification in this file.
    with open(os.path.join(model_path, "README.md"), "w", encoding="utf-8") as fout:
        fout.write(model_desc)
        fout.write("\n## Metric details\n{}\n\n".format(
            max_metrics.to_markdown()))
        fout.write("\n## Confusion matrix (at threshold={})\n{}".format(
            np.round(threshold, 6), confusion_matrix.to_markdown()))
        AdditionalMetrics.add_learning_curves(fout)
        AdditionalMetrics.add_tree_viz(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_linear_coefs(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_permutation_importance(
            fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_shap_importance(fout, model_path, fold_cnt, repeat_cnt)
        AdditionalMetrics.add_shap_binary(fout, model_path, fold_cnt, repeat_cnt)
        fout.write("\n\n[<< Go back](../README.md)\n")