def _get_config_from_vcap():
    """Derive file-store configuration from the bound VCAP services.

    Tries each supported store in order of precedence — S3, then Swift,
    then Azure Storage — and returns the first configuration found, or
    an empty dict when no store is bound.
    """
    vcap_services = util.get_vcap_services_data()
    for resolve in (
        _get_s3_specific_config,
        _get_swift_specific_config,
        _get_azure_storage_specific_config,
    ):
        config = resolve(vcap_services)
        if config is not None:
            return config
    return {}
def _get_license_subscription():
    """Return ``License.*`` runtime constants for a bound ``mendix-platform``
    service, or an empty dict when the service is absent.

    Best-effort: any failure while reading the binding is logged as a
    warning and an empty dict is returned.
    """
    try:
        services = util.get_vcap_services_data()
        if "mendix-platform" not in services:
            return {}
        subscription = services["mendix-platform"][0]
        logging.debug(
            "Configuring license subscription for [%s]..." % subscription["name"]
        )
        credentials = subscription["credentials"]
        return {
            "License.EnvironmentName": credentials["environment_id"],
            "License.LicenseServerURL": credentials["license_server_url"],
            "License.SubscriptionSecret": credentials["secret"],
            "License.UseLicenseServer": True,
        }
    except Exception as e:
        logging.warning("Failed to configure license subscription: " + str(e))
        return {}
def _verify_vcap_info(self, is_apply_limits_present=True):
    """Assert that the business-events config built from VCAP services
    matches this test case's fixture values.

    ``_get_client_config`` is patched so the check does not depend on the
    real client-configuration lookup.
    """
    patched_client_config = mock.patch(
        "buildpack.databroker.business_events._get_client_config",
        mock.MagicMock(return_value=self.expected_client_config),
    )
    with patched_client_config:
        business_events_cfg = business_events._get_config(
            util.get_vcap_services_data()
        )
    prefix = business_events.CONSTANTS_PREFIX
    # Keys that must always be present, mapped to their expected values.
    expected = {
        f"{prefix}.ServerUrl": self.server_url,
        f"{prefix}.Password": self.password,
        f"{prefix}.UserName": self.username,
        f"{prefix}.ClientConfiguration": self.expected_client_config,
    }
    if is_apply_limits_present:
        expected[f"{prefix}.ApplyLimits"] = self.apply_limits
    for key, value in expected.items():
        assert business_events_cfg[key] == value
def get_config(m2ee):
    """Derive file-store configuration from the bound VCAP services.

    Tries S3, then Swift, then Azure Storage, returning the first
    configuration found. When no external store is bound, a warning is
    logged (uploaded files will not survive restarts) and an empty dict
    is returned.
    """
    vcap_services = util.get_vcap_services_data()
    for resolve in (
        _get_s3_specific_config,
        _get_swift_specific_config,
        _get_azure_storage_specific_config,
    ):
        config = resolve(vcap_services, m2ee)
        if config is not None:
            return config
    logging.warning(
        "External file store not configured, uploaded files in the app "
        "will not persist across restarts. See https://github.com/mendix/"
        "cf-mendix-buildpack for file store configuration details."
    )
    return {}
def __init__(self):
    """Parse VCAP_SERVICES once and cache the result on the instance."""
    self.vcap_services = util.get_vcap_services_data()
def get_new_relic_license_key():
    """Return the license key of a bound ``newrelic`` service, or ``None``
    when no such service is present in VCAP services."""
    vcap_services = util.get_vcap_services_data()
    if not vcap_services or "newrelic" not in vcap_services:
        return None
    return vcap_services["newrelic"][0]["credentials"]["licenseKey"]
def test_business_events_config_with_empty_creds(self):
    """A Kafka binding with null credentials must not raise during
    business-events config parsing."""
    os.environ["VCAP_SERVICES"] = self.kafka_shared_vcap_with_null_creds
    # make sure any exceptions in the business events does not cause any errors
    business_events_cfg = business_events._get_config(
        util.get_vcap_services_data()
    )
# Wire the Databroker JMX metrics into Datadog: the databroker processes
# generate per-instance JMX config plus config files that Datadog scrapes.
(
    databroker_jmx_instance_cfg,
    databroker_jmx_config_files,
) = databroker_processes.get_datadog_config(datadog._get_user_checks_dir())
datadog.update_config(
    m2ee,
    model_version=model_version,
    runtime_version=runtime_version,
    extra_jmx_instance_config=databroker_jmx_instance_cfg,
    jmx_config_files=databroker_jmx_config_files,
)
nginx.update_config()
databroker.update_config(m2ee)
# Business events configuration is derived from the bound VCAP services.
databroker.business_events.update_config(m2ee, util.get_vcap_services_data())

# Start components and runtime
telegraf.run(runtime_version)
datadog.run(model_version, runtime_version)
metering.run()
logs.run(m2ee)
runtime.run(m2ee, logs.get_loglevels())
metrics.run(m2ee)
appdynamics.run()
nginx.run()

# Wait for the runtime to be ready before starting Databroker
if databroker.is_enabled():
    runtime.await_database_ready(m2ee)
databroker_processes.run(database.get_config())
def run():
    """Register this app with the Schnapps backup service, if bound.

    Collects S3 file-store and PostgreSQL database credentials from the
    environment and PUTs them to the bound schnapps service endpoint.
    Best-effort: every failure path logs and returns without raising.
    """
    vcap_services = util.get_vcap_services_data()
    # Guard before iterating: an empty/missing VCAP payload means no backup
    # service can possibly be bound (and iterating None would crash).
    if not vcap_services:
        logging.debug("No backup service detected")
        return

    schnapps = None
    amazon_s3 = None
    for key in vcap_services:
        if key.startswith("amazon-s3"):
            amazon_s3 = key
        if key.startswith("schnapps"):
            schnapps = key

    if schnapps is None:
        logging.debug("No backup service detected")
        return

    backup_service = {}
    if amazon_s3 is not None:
        s3_credentials = vcap_services[amazon_s3][0]["credentials"]
        backup_service["filesCredentials"] = {
            "accessKey": s3_credentials["access_key_id"],
            "secretKey": s3_credentials["secret_access_key"],
            "bucketName": s3_credentials["bucket"],
        }
        if "key_suffix" in s3_credentials:  # Not all s3 plans have this field
            backup_service["filesCredentials"]["keySuffix"] = s3_credentials[
                "key_suffix"
            ]

    try:
        db_config = database.get_config()
        if db_config["DatabaseType"] != "PostgreSQL":
            raise Exception(
                "Schnapps only supports postgresql, not %s"
                % db_config["DatabaseType"]
            )
        # DatabaseHost may be "host" or "host:port"; default to 5432.
        host_and_port = db_config["DatabaseHost"].split(":")
        backup_service["databaseCredentials"] = {
            "host": host_and_port[0],
            "username": db_config["DatabaseUserName"],
            "password": db_config["DatabasePassword"],
            "dbname": db_config["DatabaseName"],
            "port": int(host_and_port[1]) if len(host_and_port) > 1 else 5432,
        }
    except Exception:
        # logging.exception records the traceback; no need to bind the error.
        logging.exception(
            "Schnapps will not be activated because error occurred with "
            "parsing the database credentials"
        )
        return

    schnapps_url = vcap_services[schnapps][0]["credentials"]["url"]
    schnapps_api_key = vcap_services[schnapps][0]["credentials"]["apiKey"]

    try:
        result = requests.put(
            schnapps_url,
            headers={
                "Content-Type": "application/json",
                "apiKey": schnapps_api_key,
            },
            data=json.dumps(backup_service),
        )
    except requests.exceptions.SSLError as e:
        logging.warning("Failed to contact backup service. SSLError: %s", str(e))
        return
    except Exception:
        logging.warning("Failed to contact backup service: ", exc_info=True)
        return

    if result.status_code == 200:
        logging.info("Successfully updated backup service")
    else:
        logging.warning("Failed to update backup service: " + result.text)