def post_solver_result():
    """Store the given solver result and return its document id."""
    adapter = SolverResultsStore()
    adapter.connect()
    document_id = adapter.store_document(request.json)
    _LOGGER.info("Solver result stored with document_id %r", document_id)
    return (
        jsonify({"document_id": document_id}),
        201,
        {"Content-Type": "application/json"},
    )

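
# A minimal sketch of how a handler like post_solver_result() above might be
# exposed over HTTP with Flask. The /solver-result route path, app setup, and
# port are hypothetical choices for illustration; the original snippet does
# not show how the endpoint is registered.
from flask import Flask

app = Flask(__name__)
app.add_url_rule("/solver-result", view_func=post_solver_result, methods=["POST"])

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8080)  # hypothetical bind address and port
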
def _healthiness():
    """Check service healthiness."""
    # Check that Ceph is reachable.
    adapter = SolverResultsStore()
    adapter.connect()
    adapter.ceph.check_connection()
    return jsonify({
        "status": "ready",
        "version": thoth_user_api.__version__,
    }), 200, {"Content-Type": "application/json"}

def get_liveness():
    """Check service liveness by verifying the Ceph connection."""
    adapter = SolverResultsStore()
    adapter.connect()
    adapter.ceph.check_connection()
    return (
        jsonify({"status": "ready", "version": __version__}),
        200,
        {"Content-Type": "application/json"},
    )

def _healthiness():
    """Check service healthiness."""
    # Check response from Kubernetes API.
    response = requests.get(
        Configuration.KUBERNETES_API_URL,
        verify=Configuration.KUBERNETES_VERIFY_TLS,
    )
    response.raise_for_status()

    # Check that Ceph is reachable.
    adapter = SolverResultsStore()
    adapter.connect()
    adapter.ceph.check_connection()

    return jsonify({
        'status': 'ready',
        'version': thoth_user_api.__version__,
    }), 200, {'Content-Type': 'application/json'}

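
# A quick way to exercise a probe handler like _healthiness() above is
# Flask's built-in test client. A minimal sketch: the /readiness route is a
# hypothetical choice, and the assertions assume the Kubernetes API and Ceph
# are actually reachable from the environment running this code.
from flask import Flask

probe_app = Flask(__name__)
probe_app.add_url_rule("/readiness", view_func=_healthiness)

with probe_app.test_client() as client:
    response = client.get("/readiness")
    assert response.status_code == 200
    assert response.get_json()["status"] == "ready"
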
import logging

# The store classes instantiated below are assumed to come from
# thoth.storages, like the two store imports shown in the original excerpt.
from thoth.storages import AdvisersResultsStore
from thoth.storages import AnalysisResultsStore
from thoth.storages import InspectionResultsStore
from thoth.storages import PackageAnalysisResultsStore
from thoth.storages import ProvenanceResultsStore
from thoth.storages import SolverResultsStore
from thoth.storages import DependencyMonkeyReportsStore
from thoth.common import init_logging
from thoth.common import OpenShift
import thoth.metrics_exporter.metrics as metrics

init_logging()

_LOGGER = logging.getLogger(__name__)

_MONITORED_STORES = (
    AdvisersResultsStore(),
    AnalysisResultsStore(),
    InspectionResultsStore(),
    ProvenanceResultsStore(),
    PackageAnalysisResultsStore(),
    SolverResultsStore(),
    DependencyMonkeyReportsStore(),
)

_NAMESPACES_VARIABLES = [
    "THOTH_FRONTEND_NAMESPACE",
    "THOTH_MIDDLETIER_NAMESPACE",
    "THOTH_BACKEND_NAMESPACE",
    "THOTH_AMUN_NAMESPACE",
    "THOTH_AMUN_INSPECTION_NAMESPACE",
]

_JOBS_LABELS = [
    "component=dependency-monkey",
    "component=amun-inspection-job",
    "component=solver",
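
# Hypothetical helper, not part of the original module: a metrics exporter
# typically probes each monitored store in turn. This sketch relies only on
# the connect()/check_connection() calls the snippets above already use.
def check_monitored_stores() -> None:
    """Probe every monitored result store and log any that is unreachable."""
    for adapter in _MONITORED_STORES:
        try:
            adapter.connect()
            adapter.ceph.check_connection()
        except Exception as exc:
            _LOGGER.error("Store %s is unreachable: %s", adapter.__class__.__name__, exc)
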
def _fixture_adapter():
    """Retrieve an adapter to the solver results store."""
    return SolverResultsStore(
        deployment_name=_DEPLOYMENT_NAME,
        prefix=_BUCKET_PREFIX,
        **CEPH_INIT_KWARGS,
    )

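
# In a pytest suite, a helper like _fixture_adapter() above is typically
# wrapped in a fixture. A sketch: the fixture and test names are
# illustrative, and the test assumes the deployment_name constructor keyword
# is stored on the adapter as an attribute of the same name.
import pytest


@pytest.fixture(name="adapter")
def fixture_adapter():
    return _fixture_adapter()


def test_adapter_deployment_name(adapter):
    assert adapter.deployment_name == _DEPLOYMENT_NAME
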
def construct_prescription_gh_release_notes(
    *, start_date: Optional[date], end_date: Optional[date]
) -> Dict[str, Any]:
    """Construct prescriptions for GitHub release notes."""
    solver_results = SolverResultsStore()
    solver_results.connect()

    release_notes = []
    release_notes_seen = set()
    for document_id, doc in solver_results.iterate_results(
        start_date=start_date, end_date=end_date, include_end_date=True
    ):
        if not doc["result"]["tree"]:
            continue

        _LOGGER.debug("Processing solver document %r", document_id)

        # Pick only the first entry; this is OK for the deployment as the
        # solver is run just once per package.
        metadata = doc["result"]["tree"][0]["importlib_metadata"]["metadata"]

        # Check that the required metadata are available.
        version = metadata.get("Version")
        if not version:
            continue

        name = metadata.get("Name")
        if not name:
            continue

        # Do not construct duplicate entries.
        if (name, version) in release_notes_seen:
            continue

        release_notes_seen.add((name, version))

        url_candidates = [metadata.get("Home-page")]
        for url in metadata.get("Project-URL") or []:
            url_candidates.append(url.rsplit(",", maxsplit=1)[-1].strip())

        for url in url_candidates:
            if not url or not url.startswith("https://github.com"):
                _LOGGER.debug("Skipping URL %r as no link to GitHub repository found", url)
                continue

            url_path_parts = urlparse(url).path.split("/")[1:]
            if len(url_path_parts) < 2:
                _LOGGER.warning(
                    "Skipping URL %r as GitHub repository and organization cannot be parsed", url
                )
                continue

            org, repo = url_path_parts[:2]

            # Try without the 'v' prefix.
            release_url = f"https://github.com/{org}/{repo}/releases/tag/{version}"
            try:
                response = requests.head(release_url, allow_redirects=True)
            except Exception as exc:
                _LOGGER.error("Obtaining information from %r failed: %s", release_url, str(exc))
                continue

            if response.status_code == 200:
                entry = _get_release_notes_entry(org, repo, metadata, has_v_prefix=False)
                _LOGGER.info("Found GitHub release notes at %s", release_url)
                release_notes.append(entry)
                break

            # Try with the 'v' prefix.
            release_url = f"https://github.com/{org}/{repo}/releases/tag/v{version}"
            try:
                response = requests.head(release_url, allow_redirects=True)
            except Exception as exc:
                _LOGGER.error("Obtaining information from %r failed: %s", release_url, str(exc))
                continue

            if response.status_code == 200:
                entry = _get_release_notes_entry(org, repo, metadata, has_v_prefix=True)
                _LOGGER.info("Found GitHub release notes at %s", release_url)
                release_notes.append(entry)
                break

    return {
        "name": "PyPIGitHubReleaseNotesWrap",
        "type": "wrap.GitHubReleaseNotes",
        "should_include": {
            "adviser_pipeline": True,
        },
        "run": {
            "release_notes": release_notes,
        },
    }
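
# A hedged usage sketch: build a prescription unit for a one-month window and
# serialize it to YAML with PyYAML. The date range and output handling are
# illustrative, not part of the original function.
from datetime import date

import yaml

prescription = construct_prescription_gh_release_notes(
    start_date=date(2021, 1, 1),
    end_date=date(2021, 1, 31),
)
print(yaml.safe_dump(prescription, sort_keys=False))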