def extraction(msg, extra, remove=True):
    """Download the archive referenced by ``msg["url"]``, extract canonical
    facts and the system profile from it, and return the groomed fact dict.

    Any failure is logged and recorded as a string under ``facts["error"]``;
    a (possibly partial) fact dict is always returned.

    :param msg: dict with a "url" key pointing at the uploaded archive
    :param extra: structured-logging context passed to the logger
    :param remove: delete the temp file on close (False aids debugging)
    """
    metrics.extraction_count.inc()
    facts = {"system_profile": {}}
    try:
        with NamedTemporaryFile(delete=remove) as tf:
            tf.write(get_archive(msg["url"]))
            tf.flush()  # ensure the archive is fully on disk before extract() reads it
            logger.debug("extracting facts from %s", tf.name, extra=extra)
            with extract(tf.name) as ex:
                facts = get_canonical_facts(path=ex.tmp_dir)
                facts["system_profile"] = get_system_profile(path=ex.tmp_dir)
    except Exception as e:
        logger.exception("Failed to extract facts: %s", str(e), extra=extra)
        facts["error"] = str(e)
    finally:
        # Promote selected system-profile values to top-level facts.
        # BUG FIX: use .get() — if get_system_profile() raised after the
        # canonical facts were assigned, facts has no "system_profile" key
        # and direct indexing would raise KeyError here, masking the error.
        system_profile = facts.get("system_profile", {})
        if system_profile.get("display_name"):
            facts["display_name"] = system_profile.get("display_name")
        if system_profile.get("satellite_id"):
            facts["satellite_id"] = system_profile.get("satellite_id")
        if system_profile.get("tags"):
            facts["tags"] = system_profile.pop("tags")
        groomed_facts = _remove_empties(_remove_bad_display_name(facts))
        metrics.msg_processed.inc()
        # BUG FIX: only count a success when extraction actually succeeded;
        # previously extract_success was incremented even on failure.
        if "error" not in facts:
            metrics.extract_success.inc()
        return groomed_facts
def checkin(self):
    """
    Send an ultralight check-in request containing only the Canonical Facts.

    Returns True on a successful check-in, None when the request fails or
    the host is unknown, and raises RuntimeError on an unexpected response.
    """
    logger.info("Checking in...")

    try:
        canonical_facts = get_canonical_facts()
    except Exception as e:
        # Best-effort: fall back to the bare machine ID when fact
        # collection fails, so the check-in can still be attempted.
        logger.debug('Error getting canonical facts: %s', e)
        logger.debug('Falling back to only machine ID.')
        insights_id = generate_machine_id()
        canonical_facts = {"insights_id": str(insights_id)}

    url = self.inventory_url + "/hosts/checkin"
    # IDIOM FIX: pass lazy %-args to the logger instead of pre-formatting
    # with "%" — the string is only built if the level is enabled.
    logger.debug("Sending check-in request to %s with %s", url, canonical_facts)
    try:
        response = self.post(url, headers={"Content-Type": "application/json"},
                             data=json.dumps(canonical_facts))
    except REQUEST_FAILED_EXCEPTIONS as exception:
        _api_request_failed(exception)
        return None
    logger.debug("Check-in response status code %d", response.status_code)

    if response.status_code == requests.codes.CREATED:
        logger.info("Successfully checked in!")
        return True
    elif response.status_code == requests.codes.NOT_FOUND:
        # Host is not registered in inventory; delegate the messaging.
        _host_not_found()
    else:
        logger.debug("Check-in response body %s", response.text)
        raise RuntimeError("Unknown check-in API response")
def upload_archive(self, data_collected, content_type, duration=None):
    """
    Do an HTTPS Upload of the archive at *data_collected*.

    Collects canonical facts (best-effort) as upload metadata, POSTs the
    archive, and on success writes the local registration record.
    Returns the requests Response object in all cases.
    """
    if self.config.legacy_upload:
        return self._legacy_upload_archive(data_collected, duration)
    file_name = os.path.basename(data_collected)
    upload_url = self.upload_url
    c_facts = {}
    try:
        c_facts = get_canonical_facts()
    except Exception as e:
        # Best-effort: upload proceeds with whatever metadata we have.
        logger.debug('Error getting canonical facts: %s', e)
    if self.config.display_name:
        # add display_name to canonical facts
        c_facts['display_name'] = self.config.display_name
    if self.config.ansible_host:
        # add ansible_host to canonical facts
        c_facts['ansible_host'] = self.config.ansible_host
    if self.config.branch_info:
        c_facts["branch_info"] = self.config.branch_info
        c_facts["satellite_id"] = self.config.branch_info["remote_leaf"]
    c_facts = json.dumps(c_facts)
    logger.debug('Canonical facts collected:\n%s', c_facts)

    # BUG FIX: the archive file handle was opened and never closed; the
    # context manager guarantees it is released once the POST completes.
    with open(data_collected, 'rb') as archive:
        files = {
            'file': (file_name, archive, content_type),
            'metadata': c_facts
        }
        logger.debug("Uploading %s to %s", data_collected, upload_url)
        logger.log(NETWORK, "POST %s", upload_url)
        upload = self.session.post(upload_url, files=files, headers={})
    logger.log(NETWORK, "Upload status: %s %s %s",
               upload.status_code, upload.reason, upload.text)
    logger.debug('Request ID: %s',
                 upload.headers.get('x-rh-insights-request-id', None))
    if upload.status_code in (200, 202):
        # 202 from platform, no json response
        logger.debug(upload.text)
        # upload = registration on platform
        try:
            write_registered_file()
        except OSError as e:
            if e.errno == errno.EACCES and os.getuid() != 0:
                # if permissions error as non-root, ignore
                pass
            else:
                logger.error(
                    'Could not update local registration record: %s', str(e))
    else:
        logger.debug("Upload archive failed with status code %s",
                     upload.status_code)
        return upload
    logger.debug("Upload duration: %s", upload.elapsed)
    return upload
async def extract_facts(archive):
    """Extract canonical facts from *archive*.

    On an extraction failure of a known kind, the returned dict instead
    carries the first exception argument under the 'error' key.
    """
    logger.info("extracting facts from %s", archive)
    collected = {}
    try:
        with extract(archive) as unpacked:
            collected = get_canonical_facts(path=unpacked.tmp_dir)
    except (InvalidContentType, KeyError) as exc:
        collected['error'] = exc.args[0]
    return collected
def extract_facts(archive):
    """Extract canonical facts and the system profile from *archive*.

    Failures are logged and recorded as a string under ``facts['error']``
    rather than raised, so a (possibly partial) fact dict is always
    returned after grooming.

    TODO: facts, system_profiles, and errors are all passed through via the
    'facts' hash. These should likely be split out.
    """
    logger.info("extracting facts from %s", archive)
    facts = {}
    try:
        with extract(archive) as ex:
            facts = get_canonical_facts(path=ex.tmp_dir)
            facts['system_profile'] = get_system_profile(path=ex.tmp_dir)
    except Exception as e:
        logger.exception("Failed to extract facts")
        # BUG FIX: store the message, not the exception object, so the
        # result stays serializable (matches the other extraction paths).
        facts['error'] = str(e)
    groomed_facts = _remove_empties(_remove_bad_display_name(facts))
    return groomed_facts
def upload_archive(self, data_collected, content_type, duration):
    """
    Do an HTTPS Upload of the archive at *data_collected*.

    Collects canonical facts (best-effort) as upload metadata, POSTs the
    archive, and on success writes the local registration record.
    Returns the requests Response object in all cases.
    """
    if self.config.legacy_upload:
        return self._legacy_upload_archive(data_collected, duration)
    file_name = os.path.basename(data_collected)
    upload_url = self.upload_url
    c_facts = {}
    try:
        c_facts = get_canonical_facts()
    except Exception as e:
        # Best-effort: upload proceeds with whatever metadata we have.
        logger.debug('Error getting canonical facts: %s', e)
    if self.config.display_name:
        # add display_name to canonical facts
        c_facts['display_name'] = self.config.display_name
    if self.config.branch_info:
        c_facts["satellite_id"] = self.config.branch_info["remote_leaf"]
    c_facts = json.dumps(c_facts)
    logger.debug('Canonical facts collected:\n%s', c_facts)

    # BUG FIX: the archive file handle was opened and never closed; the
    # context manager guarantees it is released once the POST completes.
    with open(data_collected, 'rb') as archive:
        files = {
            'file': (file_name, archive, content_type),
            'metadata': c_facts
        }
        logger.debug("Uploading %s to %s", data_collected, upload_url)
        net_logger.info("POST %s", upload_url)
        upload = self.session.post(upload_url, files=files, headers={})
    logger.debug("Upload status: %s %s %s",
                 upload.status_code, upload.reason, upload.text)
    logger.debug('Request ID: %s',
                 upload.headers.get('x-rh-insights-request-id', None))
    if upload.status_code in (200, 202):
        # 202 from platform, no json response
        logger.debug(upload.text)
        # upload = registration on platform
        write_registered_file()
    else:
        logger.debug(
            "Upload archive failed with status code %s", upload.status_code)
        return upload
    logger.debug("Upload duration: %s", upload.elapsed)
    return upload
def extraction(msg, extra, remove=True):
    """Download the archive referenced by ``msg["url"]``, extract canonical
    facts and the system profile from it, and return the groomed fact dict.

    Failures are logged and recorded under ``facts["error"]`` instead of
    being raised.

    :param msg: dict with a "url" key pointing at the uploaded archive
    :param extra: structured-logging context passed to the logger
    :param remove: delete the temp file on close (False aids debugging)
    """
    facts = {"system_profile": {}}
    try:
        with NamedTemporaryFile(delete=remove) as tf:
            tf.write(get_archive(msg["url"]))
            # BUG FIX: flush the buffered writes so extract() reads the
            # complete archive from disk, not a truncated file.
            tf.flush()
            logger.debug("extracting facts from %s", tf.name, extra=extra)
            with extract(tf.name) as ex:
                facts = get_canonical_facts(path=ex.tmp_dir)
                facts["system_profile"] = get_system_profile(path=ex.tmp_dir)
    except Exception as e:
        logger.exception("Failed to extract facts: %s", e, extra=extra)
        # BUG FIX: record the message, not the exception object, so the
        # result stays serializable (matches the other extraction paths).
        facts["error"] = str(e)
    finally:
        # Promote display_name from the system profile to a top-level fact.
        # .get() guards against a partial fact dict: if get_system_profile()
        # raised after canonical facts were assigned, there is no
        # "system_profile" key and direct indexing would raise KeyError.
        if facts.get("system_profile", {}).get("display_name"):
            facts["display_name"] = facts["system_profile"].get("display_name")
        groomed_facts = _remove_empties(_remove_bad_display_name(facts))
        return groomed_facts
def extract(msg, extra, remove=True):
    """
    Perform the extraction of canonical system facts and system profile.

    Failures are logged and recorded as a string under ``facts["error"]``;
    the (possibly partial) fact dict is post-processed and returned in all
    cases.
    """
    facts = {"system_profile": {}}
    with unpacked_archive(msg, remove) as unpacked:
        try:
            facts = get_canonical_facts(unpacked.tmp_dir)
            facts['system_profile'] = get_system_profile(unpacked.tmp_dir)
        except Exception as e:
            logger.exception("Failed to extract facts: %s", str(e), extra=extra)
            facts["error"] = str(e)
        finally:
            # Capture the outcome before postprocess() reshapes the dict.
            succeeded = "error" not in facts
            facts = postprocess(facts)
            metrics.msg_processed.inc()
            # BUG FIX: only count a success when extraction actually
            # succeeded; previously extract_success was incremented even
            # when an exception had been caught.
            if succeeded:
                metrics.extract_success.inc()
            return facts
def extract_facts(data, request_id, account, extra, remove=True):
    """Write *data* (the archive's bytes) to a temp file, extract canonical
    facts and the system profile from it, and return the groomed fact dict.

    Failures are logged and recorded under ``facts['error']``; a (possibly
    partial) fact dict is always returned.

    NOTE(review): request_id and account are unused here — presumably kept
    for caller-interface compatibility; confirm before removing.

    TODO: facts, system_profiles, and errors are all passed through via the
    'facts' hash. These should likely be split out.
    """
    facts = {}
    try:
        with NamedTemporaryFile(delete=remove) as tf:
            tf.write(data)
            data = None  # drop our reference so the archive bytes can be freed
            logger.info("extracting facts from %s", tf.name, extra=extra)
            with extract(tf.name) as ex:
                facts = get_canonical_facts(path=ex.tmp_dir)
                facts['system_profile'] = get_system_profile(path=ex.tmp_dir)
    except Exception as e:
        logger.exception("Failed to extract facts: %s", e, extra=extra)
        facts['error'] = e
    else:
        logger.info("Successfully extracted canonical facts", extra=extra)
    finally:
        # BUG FIX: on an early failure facts has no 'system_profile' key;
        # the previous direct indexing raised KeyError here in the finally
        # block and masked the original error. Use .get() with a default.
        if facts.get('system_profile', {}).get('display_name'):
            facts['display_name'] = facts['system_profile'].get('display_name')
        groomed_facts = _remove_empties(_remove_bad_display_name(facts))
        return groomed_facts
def __init__(self, config):
    """Initialize from the client *config*.

    The hostname is taken from the canonical facts' 'fqdn' and falls back
    to '' when fact collection fails, so a collection error does not abort
    construction (matches the guarded get_canonical_facts() usage in the
    rest of the codebase).
    """
    self.config = config
    self.conn = InsightsConnection(config)
    try:
        self.hostname = get_canonical_facts().get('fqdn', '')
    except Exception as e:
        # ROBUSTNESS FIX: previously an unguarded call; a fact-collection
        # failure would raise out of the constructor.
        logger.debug('Error getting canonical facts: %s', e)
        self.hostname = ''
    self.archive = InsightsArchive(config)
def upload_archive(self, data_collected, content_type, duration):
    """
    Do an HTTPS Upload of the archive at *data_collected*.

    Handles both the legacy upload path (MIME detection via python-magic,
    machine-id URL suffix, collection-time header) and the platform path.
    Returns the requests Response; on a 200/201 (legacy) response the
    account number from the JSON body is stored on self.config, otherwise
    self.config.account_number is set to None.
    """
    file_name = os.path.basename(data_collected)
    upload_url = self.upload_url
    try:
        c_facts = json.dumps(get_canonical_facts())
    except Exception as e:
        # Best-effort: upload proceeds without canonical-facts metadata.
        logger.debug('Error getting canonical facts: %s', e)
        c_facts = None
    files = {}
    # BUG FIX: the_json was unbound on the 202 path; the assignment below
    # relied on a bare `except:` to mask the resulting NameError.
    the_json = None
    # legacy upload
    if self.config.legacy_upload:
        try:
            from insights.contrib import magic
            m = magic.open(magic.MAGIC_MIME)
            m.load()
            content_type = m.file(data_collected)
        except ImportError:
            logger.debug(
                'python-magic not installed, using backup function...')
            from .utilities import magic_plan_b
            content_type = magic_plan_b(data_collected)
        if self.config.analyze_container:
            logger.debug(
                'Uploading container, image, mountpoint or tarfile.')
        else:
            logger.debug('Uploading a host.')
            upload_url = self.upload_url + '/' + generate_machine_id()
        headers = {'x-rh-collection-time': str(duration)}
    else:
        headers = {}
    files['metadata'] = c_facts
    # BUG FIX: the archive file handle was opened and never closed; the
    # context manager guarantees it is released once the POST completes.
    with open(data_collected, 'rb') as archive:
        files['file'] = (file_name, archive, content_type)
        logger.debug("Uploading %s to %s", data_collected, upload_url)
        net_logger.info("POST %s", upload_url)
        upload = self.session.post(upload_url, files=files, headers=headers)
    logger.debug("Upload status: %s %s %s",
                 upload.status_code, upload.reason, upload.text)
    if upload.status_code in (200, 201):
        # 200/201 from legacy, load the response
        the_json = json.loads(upload.text)
    elif upload.status_code == 202:
        # 202 from platform, no json response
        logger.debug(upload.text)
    else:
        logger.error("Upload archive failed with status code %s",
                     upload.status_code)
        return upload
    try:
        self.config.account_number = the_json["upload"]["account_number"]
    except (TypeError, KeyError):
        # BUG FIX: narrowed from a bare `except:`; a None body (platform
        # path) or a missing key are the expected misses here.
        self.config.account_number = None
    logger.debug("Upload duration: %s", upload.elapsed)
    return upload