def post_dependency_monkey_report():  # Ignore PyDocStyleBear
    """Store a Dependency Monkey report and return its document id."""
    adapter = DependencyMonkeyReportsStore()
    adapter.connect()
    document_id = adapter.store_document(request.json)
    _LOGGER.info("Dependency Monkey report stored with document_id %r", document_id)
    # BUG FIX: the HTTP header name is "Content-Type" — the original key
    # "ContentType" is not a valid header and was silently ignored by clients.
    return jsonify({'document_id': document_id}), 201, {
        'Content-Type': 'application/json'
    }
def get_dependency_monkey_report(analysis_id: str) -> tuple:
    """Retrieve a dependency monkey run report."""
    parameters = {"analysis_id": analysis_id}
    reports_store = DependencyMonkeyReportsStore()
    reports_store.connect()
    try:
        report_document = reports_store.retrieve_document(analysis_id)
    except NotFoundError:
        # No report stored under this id — answer with a descriptive 404 payload.
        error_payload = {
            "parameters": parameters,
            "error": f"Report with the given id {analysis_id} was not found",
        }
        return error_payload, 404
    return {"parameters": parameters, "report": report_document}, 200
from thoth.common import init_logging from thoth.common import OpenShift import thoth.metrics_exporter.metrics as metrics init_logging() _LOGGER = logging.getLogger(__name__) _MONITORED_STORES = ( AdvisersResultsStore(), AnalysisResultsStore(), InspectionResultsStore(), ProvenanceResultsStore(), PackageAnalysisResultsStore(), SolverResultsStore(), DependencyMonkeyReportsStore(), ) _NAMESPACES_VARIABLES = [ "THOTH_FRONTEND_NAMESPACE", "THOTH_MIDDLETIER_NAMESPACE", "THOTH_BACKEND_NAMESPACE", "THOTH_AMUN_NAMESPACE", "THOTH_AMUN_INSPECTION_NAMESPACE", ] _JOBS_LABELS = [ "component=dependency-monkey", "component=amun-inspection-job", "component=solver", "component=package-extract",
def aggregate_dm_results_per_identifier(
    identifiers_inspection: List[str],
    limit_results: bool = False,
    max_batch_identifiers_ids: int = 5,
) -> tuple:
    """Aggregate inspection batch ids and specifications from DM documents stored in Ceph.

    Returns a 2-tuple (the original ``Union[dict, List[str]]`` annotation was
    wrong); ``-> tuple`` matches the convention used by sibling functions.

    :param identifiers_inspection: list of identifier/s to filter inspection batch ids
    :param limit_results: limit inspection batch ids considered to `max_batch_identifiers_ids` to test analysis
    :param max_batch_identifiers_ids: maximum number of inspection batch ids considered
    :return: tuple of (mapping of DM document id -> inspection batch specifications,
             list of inspection batch identifiers)
    """
    dm_store = DependencyMonkeyReportsStore()
    dm_store.connect()
    dm_ids = list(dm_store.get_document_listing())
    # Lazy %-style args so the message is only formatted if the level is enabled.
    _LOGGER.info("Number of DM reports identified is: %r", len(dm_ids))

    dm_info_dict = {}
    i_batch_identifiers = []
    number_dm_ids = len(dm_ids)
    i_batch_counter = 0

    if limit_results:
        _LOGGER.info("Limiting results to %d to test functions!!", max_batch_identifiers_ids)

    for current_dm_counter, ids in enumerate(dm_ids):
        document = dm_store.retrieve_document(ids)
        _LOGGER.info("Analysis n.%d/%d", current_dm_counter + 1, number_dm_ids)
        report = document["result"].get("report")
        # A fresh empty specifications dict is passed for every document.
        i_batch_ids_specifications, i_batch_identifiers, i_batch_counter = _extract_dm_responses_from_report(
            report=report,
            inspection_specifications={},
            i_batch_identifiers=i_batch_identifiers,
            identifiers=identifiers_inspection,
            i_batch_counter=i_batch_counter,
            max_ids=max_batch_identifiers_ids,
            limit_results=limit_results,
        )

        if i_batch_ids_specifications:
            _LOGGER.info("\nTot inspections batches identified: %d", len(i_batch_ids_specifications))
            # Single assignment — the original assigned {} and immediately overwrote it.
            dm_info_dict[ids] = i_batch_ids_specifications
        else:
            _LOGGER.info("No inspections batches identified")

        # Stop early once the requested test quota of batch ids is exceeded.
        if limit_results and i_batch_counter > max_batch_identifiers_ids:
            _LOGGER.info("\nTot inspections batch for the analysis: %d", len(i_batch_identifiers))
            return dm_info_dict, i_batch_identifiers

    _LOGGER.info("Tot inspections batch considered: %d", len(i_batch_identifiers))
    return dm_info_dict, i_batch_identifiers