def do_check_health(self, args):
    """CLI command: invoke the runtime's health check microflow and log
    the result.

    Requires runtime >= 2.5.4 and a running application; otherwise the
    corresponding _report_* helper prints a message and we bail out.
    """
    if self._report_not_implemented('2.5.4') or self._report_not_running():
        return
    health_response = self.m2ee.client.check_health()
    if not health_response.has_error():
        feedback = health_response.get_feedback()
        # 'health' is one of: healthy, sick, unknown (per the m2ee API).
        if feedback['health'] == 'healthy':
            logger.info("Health check microflow says the application is "
                        "healthy.")
        elif feedback['health'] == 'sick':
            logger.warning("Health check microflow says the application "
                           "is sick: %s" % feedback['diagnosis'])
        elif feedback['health'] == 'unknown':
            logger.info("Health check microflow is not configured, no "
                        "health information available.")
        else:
            logger.error("Unexpected health check status: %s"
                         % feedback['health'])
    else:
        runtime_version = self.m2ee.config.get_runtime_version()
        # NOTE(review): '//' here appears to be MXVersion's overloaded
        # branch-match operator (version is within one of these branches),
        # not integer division — confirm against m2ee.version.MXVersion
        # before changing.
        if (health_response.get_result() == 3 and
                runtime_version // ('2.5.4', '2.5.5')):
            # Because of an incomplete implementation, in Mendix 2.5.4 or
            # 2.5.5 this means that the runtime is health-check
            # capable, but no health check microflow is defined.
            logger.info("Health check microflow is probably not "
                        "configured, no health information available.")
        else:
            health_response.display_error()
def get_scheduled_events(metadata):
    """Determine which scheduled events the runtime should execute.

    Reads the SCHEDULED_EVENTS environment variable:
      * unset or 'ALL'        -> ('ALL', None)
      * 'NONE'                -> ('NONE', None)
      * comma-separated names -> ('SPECIFIED', [names present in model])

    Names not found in metadata['ScheduledEvents'] are skipped with a
    warning.
    """
    scheduled_events = os.getenv('SCHEDULED_EVENTS', None)
    if scheduled_events is None or scheduled_events == 'ALL':
        logger.debug('Enabling all scheduled events')
        return ('ALL', None)
    elif scheduled_events == 'NONE':
        logger.debug('Disabling all scheduled events')
        return ('NONE', None)
    else:
        parsed_scheduled_events = scheduled_events.split(',')
        metadata_scheduled_events = [
            scheduled_event['Name']
            for scheduled_event in metadata['ScheduledEvents']
        ]
        result = []
        for scheduled_event in parsed_scheduled_events:
            if scheduled_event not in metadata_scheduled_events:
                logger.warning(
                    'Scheduled event defined but not detected in model: "%s"'
                    % scheduled_event
                )
            else:
                # BUG FIX: previously appended the entire SCHEDULED_EVENTS
                # string ('scheduled_events') instead of the individual
                # event name, so the runtime received bogus event names.
                result.append(scheduled_event)
        logger.debug('Enabling scheduled events %s' % ','.join(result))
        return ('SPECIFIED', result)
def get_custom_runtime_settings():
    """Collect custom runtime settings from the environment.

    Merges the CUSTOM_RUNTIME_SETTINGS JSON object (if parseable) with
    individual MXRUNTIME_* variables, where MXRUNTIME_A_B becomes
    setting 'A.B'. Individual variables win on conflict. A parse failure
    of the JSON blob is logged and treated as empty.
    """
    custom_runtime_settings = {}
    custom_runtime_settings_json = os.environ.get(
        'CUSTOM_RUNTIME_SETTINGS', json.dumps(custom_runtime_settings))
    try:
        custom_runtime_settings = json.loads(custom_runtime_settings_json)
    except Exception as e:
        logger.warning('Failed to parse CUSTOM_RUNTIME_SETTINGS: ' + str(e))
    # BUG FIX: os.environ.iteritems() only exists on Python 2; items()
    # works on both (the other copy of this function already uses it).
    for k, v in os.environ.items():
        if k.startswith('MXRUNTIME_'):
            custom_runtime_settings[
                k.replace('MXRUNTIME_', '', 1).replace('_', '.')
            ] = v
    return custom_runtime_settings
def set_jvm_memory(m2ee_section, vcap, java_version):
    """Configure JVM heap/metaspace options from the container memory limit.

    Args:
        m2ee_section: m2ee config dict with a 'javaopts' list and a
            'custom_environment' dict; both are mutated in place.
        vcap: VCAP application data, used for the memory limit when the
            MEMORY_LIMIT environment variable is not set.
        java_version: JVM version string; "7..." selects MaxPermSize,
            anything else MaxMetaspaceSize.
    """
    max_memory = os.environ.get("MEMORY_LIMIT")
    if max_memory:
        # assumes the limit is expressed in megabytes, e.g. "2048M" —
        # an unparsable value would make match None and raise below.
        match = re.search("([0-9]+)M", max_memory.upper())
        limit = int(match.group(1))
    else:
        limit = int(vcap["limits"]["mem"])
    # Reserve off-heap headroom; small containers get half for the heap.
    if limit >= 8192:
        heap_size = limit - 2048
    elif limit >= 4096:
        heap_size = limit - 1536
    elif limit >= 2048:
        heap_size = limit - 1024
    else:
        heap_size = int(limit / 2)
    heap_size = str(heap_size) + "M"
    env_heap_size = os.environ.get("HEAP_SIZE")
    if env_heap_size:
        # assumes HEAP_SIZE carries a one-character unit suffix ("M") —
        # TODO confirm
        if int(env_heap_size[:-1]) < limit:
            heap_size = env_heap_size
        else:
            logger.warning(
                "specified heap size {} is larger than max memory of the "
                "container ({}), falling back to a heap size of {}".format(
                    env_heap_size, str(limit) + "M", heap_size))
    javaopts = m2ee_section["javaopts"]
    javaopts.append("-Xmx%s" % heap_size)
    javaopts.append("-Xms%s" % heap_size)
    if java_version.startswith("7"):
        javaopts.append("-XX:MaxPermSize=256M")
    else:
        javaopts.append("-XX:MaxMetaspaceSize=256M")
    logger.debug("Java heap size set to %s" % heap_size)
    if os.getenv("MALLOC_ARENA_MAX"):
        logger.info("Using provided environment setting for MALLOC_ARENA_MAX")
    else:
        # BUG FIX: use integer division — under Python 3, limit / 1024 is
        # a float, producing e.g. "4.0" instead of "4" for the env value.
        m2ee_section["custom_environment"]["MALLOC_ARENA_MAX"] = str(
            max(1, limit // 1024) * 2)
def set_runtime_config(metadata, mxruntime_config, vcap_data, m2ee):
    """Assemble the complete runtime configuration into mxruntime_config.

    Layers, in order: base app settings (root URL, constants, scheduled
    events, cluster/sticky-session flags), database settings, file store,
    certificates, custom settings, license subscription and finally
    MXRUNTIME_* overrides from the environment.
    """
    execution_mode, selected_events = get_scheduled_events(metadata)
    base_settings = {
        "ApplicationRootUrl": "https://%s" % vcap_data["application_uris"][0],
        "MicroflowConstants": get_constants(metadata),
        "ScheduledEventExecution": execution_mode,
    }
    if selected_events is not None:
        base_settings["MyScheduledEvents"] = selected_events
    if is_development_mode():
        logger.warning(
            "Runtime is being started in Development Mode. Set "
            'DEVELOPMENT_MODE to "false" (currently "true") to '
            "set it to production."
        )
        base_settings["DTAPMode"] = "D"
    runtime_version = m2ee.config.get_runtime_version()
    sticky = os.getenv("ENABLE_STICKY_SESSIONS", "false").lower() == "true"
    if runtime_version >= 7 and not i_am_primary_instance():
        base_settings["com.mendix.core.isClusterSlave"] = "true"
    elif runtime_version >= 5.15 and sticky:
        logger.info("Enabling sticky sessions")
        base_settings["com.mendix.core.SessionIdCookieName"] = "JSESSIONID"
    buildpackutil.mkdir_p(os.path.join(os.getcwd(), "model", "resources"))
    mxruntime_config.update(base_settings)
    # db configuration might be None, database should then be set up with
    # MXRUNTIME_Database... custom runtime settings.
    db_settings = database_config.get_database_config(
        development_mode=is_development_mode()
    )
    if db_settings:
        mxruntime_config.update(db_settings)
    # Order matters: get_custom_settings inspects the config built so far.
    mxruntime_config.update(get_filestore_config(m2ee))
    mxruntime_config.update(get_certificate_authorities())
    mxruntime_config.update(get_client_certificates())
    mxruntime_config.update(get_custom_settings(metadata, mxruntime_config))
    mxruntime_config.update(get_license_subscription())
    mxruntime_config.update(get_custom_runtime_settings())
def get_custom_runtime_settings():
    """Collect custom runtime settings from the environment.

    Merges the CUSTOM_RUNTIME_SETTINGS JSON object (if parseable) with
    individual MXRUNTIME_* variables, mapping MXRUNTIME_A_B to setting
    'A.B'. Individual variables win on conflict.
    """
    raw_json = os.environ.get("CUSTOM_RUNTIME_SETTINGS", json.dumps({}))
    try:
        settings = json.loads(raw_json)
    except Exception as e:
        logger.warning("Failed to parse CUSTOM_RUNTIME_SETTINGS: " + str(e))
        settings = {}
    prefix = "MXRUNTIME_"
    for name, value in os.environ.items():
        if not name.startswith(prefix):
            continue
        setting_key = name.replace(prefix, "", 1).replace("_", ".")
        settings[setting_key] = value
    return settings
def get_license_subscription():
    """Return License.* runtime settings from a bound mendix-platform
    service, or an empty dict when none is bound or on any error.

    Best effort: failures are logged as warnings, never raised.
    """
    try:
        services = buildpackutil.get_vcap_services_data()
        if "mendix-platform" not in services:
            return {}
        subscription = services["mendix-platform"][0]
        logger.debug("Configuring license subscription for %s"
                     % subscription["name"])
        creds = subscription["credentials"]
        return {
            "License.EnvironmentName": creds["environment_id"],
            "License.LicenseServerURL": creds["license_server_url"],
            "License.SubscriptionSecret": creds["secret"],
            "License.UseLicenseServer": True,
        }
    except Exception as e:
        logger.warning("Failed to configure license subscription: " + str(e))
    return {}
def _get_azure_storage_specific_config(vcap_services, m2ee): if 'azure-storage' not in vcap_services: return None if m2ee.config.get_runtime_version() < 6.7: logger.warning('Can not configure Azure Storage with Mendix < 6.7') return None creds = vcap_services['azure-storage'][0]['credentials'] container_name = os.getenv('AZURE_CONTAINER_NAME', 'mendix') return { 'com.mendix.core.StorageService': 'com.mendix.storage.azure', 'com.mendix.storage.azure.Container': container_name, 'com.mendix.storage.azure.AccountName': creds['storage_account_name'], 'com.mendix.storage.azure.AccountKey': creds['primary_access_key'], }
def _get_azure_storage_specific_config(vcap_services, m2ee): if "azure-storage" not in vcap_services: return None if m2ee.config.get_runtime_version() < 6.7: logger.warning("Can not configure Azure Storage with Mendix < 6.7") return None creds = vcap_services["azure-storage"][0]["credentials"] container_name = os.getenv("AZURE_CONTAINER_NAME", "mendix") return { "com.mendix.core.StorageService": "com.mendix.storage.azure", "com.mendix.storage.azure.Container": container_name, "com.mendix.storage.azure.AccountName": creds["storage_account_name"], "com.mendix.storage.azure.AccountKey": creds["primary_access_key"], }
def get_filestore_config(m2ee):
    """Pick the first applicable external file store configuration.

    Tries S3, then Swift, then Azure. When none applies, warns that
    uploaded files will not persist and returns an empty dict.
    """
    vcap_services = buildpackutil.get_vcap_services_data()
    for provider in (
        _get_s3_specific_config,
        _get_swift_specific_config,
        _get_azure_storage_specific_config,
    ):
        config = provider(vcap_services, m2ee)
        if config is not None:
            return config
    logger.warning(
        'External file store not configured, uploaded files in the app '
        'will not persist across restarts. See https://github.com/mendix/'
        'cf-mendix-buildpack for file store configuration details.')
    return {}
def service_backups():
    """Register this app's file store and database credentials with the
    schnapps backup service, when one is bound.

    Sends a PUT to the schnapps endpoint; success/failure is only logged.
    """
    vcap_services = buildpackutil.get_vcap_services_data()
    if not vcap_services or 'schnapps' not in vcap_services:
        logger.info("No backup service detected")
        return

    payload = {}
    if 'amazon-s3' in vcap_services:
        s3_credentials = vcap_services['amazon-s3'][0]['credentials']
        files_credentials = {
            'accessKey': s3_credentials['access_key_id'],
            'secretKey': s3_credentials['secret_access_key'],
            'bucketName': s3_credentials['bucket'],
        }
        # Not all s3 plans have this field
        if 'key_suffix' in s3_credentials:
            files_credentials['keySuffix'] = s3_credentials['key_suffix']
        payload['filesCredentials'] = files_credentials

    if 'PostgreSQL' in vcap_services:
        db_config = buildpackutil.get_database_config()
        parts = db_config['DatabaseHost'].split(':')
        payload['databaseCredentials'] = {
            'host': parts[0],
            'username': db_config['DatabaseUserName'],
            'password': db_config['DatabasePassword'],
            'dbname': db_config['DatabaseName'],
            # default postgres port when the host carries no explicit port
            'port': int(parts[1]) if len(parts) > 1 else 5432,
        }

    schnapps_credentials = vcap_services['schnapps'][0]['credentials']
    response = requests.put(
        schnapps_credentials['url'],
        headers={
            'Content-Type': 'application/json',
            'apiKey': schnapps_credentials['apiKey']
        },
        data=json.dumps(payload),
    )
    if response.status_code == 200:
        logger.info("Successfully updated backup service")
    else:
        logger.warning("Failed to update backup service: " + response.text)
def set_runtime_config(metadata, mxruntime_config, vcap_data, m2ee):
    """Assemble the complete runtime configuration into mxruntime_config.

    Layers, in order: base app settings, database settings, file store,
    certificates, custom settings and MXRUNTIME_* environment overrides.
    """
    scheduled_event_execution, my_scheduled_events = (
        get_scheduled_events(metadata)
    )
    app_config = {
        'ApplicationRootUrl': 'https://%s' % vcap_data['application_uris'][0],
        'MicroflowConstants': get_constants(metadata),
        'ScheduledEventExecution': scheduled_event_execution,
    }
    if my_scheduled_events is not None:
        app_config['MyScheduledEvents'] = my_scheduled_events
    if is_development_mode():
        logger.warning(
            'Runtime is being started in Development Mode. Set '
            'DEVELOPMENT_MODE to "false" (currently "true") to '
            'set it to production.'
        )
        app_config['DTAPMode'] = 'D'
    if (m2ee.config.get_runtime_version() >= 7 and
            not i_am_primary_instance()):
        app_config['com.mendix.core.isClusterSlave'] = 'true'
    elif (m2ee.config.get_runtime_version() >= 5.15 and
            os.getenv('ENABLE_STICKY_SESSIONS', 'false').lower() == 'true'):
        logger.info('Enabling sticky sessions')
        app_config['com.mendix.core.SessionIdCookieName'] = 'JSESSIONID'
    mxruntime_config.update(app_config)
    mxruntime_config.update(buildpackutil.get_database_config(
        development_mode=is_development_mode(),
    ))
    mxruntime_config.update(get_filestore_config(m2ee))
    mxruntime_config.update(get_certificate_authorities())
    mxruntime_config.update(get_client_certificates())
    mxruntime_config.update(get_custom_settings(metadata, mxruntime_config))
    # BUG FIX: os.environ.iteritems() only exists on Python 2; items()
    # works on both Python 2 and 3.
    for k, v in os.environ.items():
        if k.startswith('MXRUNTIME_'):
            mxruntime_config[
                k.replace('MXRUNTIME_', '', 1).replace('_', '.')
            ] = v
def emit(self, stats):
    """Post stats to the trends server; on any failure fall back to the
    old loggregator-based emitter."""
    try:
        response = requests.post(self.metrics_url, json=stats, timeout=10)
    except Exception:
        logger.warning("Failed to send metrics to trends server.",
                       exc_info=True)
        # Fallback to old pipeline and stdout for now.
        # Later, we will want to buffer and resend.
        # This will be done in DEP-75.
        self.fallback_emitter.emit(stats)
        return

    if response.status_code == 200:
        return
    logger.warning(
        "Failed to send metrics to trends server. Falling back to old "
        "loggregator based method. Got status code %s "
        "for URL %s, with body %s.",
        response.status_code,
        self.metrics_url,
        response.text,
    )
    self.fallback_emitter.emit(stats)
def set_runtime_config(metadata, mxruntime_config, vcap_data, m2ee):
    """Assemble the complete runtime configuration into mxruntime_config
    (cluster-aware variant).

    Layers, in order: base app settings, database settings, file store,
    cluster settings, certificates, custom settings and MXRUNTIME_*
    environment overrides.
    """
    scheduled_event_execution, my_scheduled_events = (
        get_scheduled_events(metadata)
    )
    app_config = {
        'ApplicationRootUrl': 'https://%s' % vcap_data['application_uris'][0],
        'MicroflowConstants': get_constants(metadata),
        'ScheduledEventExecution': scheduled_event_execution,
    }
    if my_scheduled_events is not None:
        app_config['MyScheduledEvents'] = my_scheduled_events
    if is_development_mode():
        logger.warning(
            'Runtime is being started in Development Mode. Set '
            'DEVELOPMENT_MODE to "false" (currently "true") to '
            'set it to production.'
        )
        app_config['DTAPMode'] = 'D'
    # Sticky sessions conflict with clustering, hence the extra guard.
    if (m2ee.config.get_runtime_version() >= 5.15 and
            os.getenv('ENABLE_STICKY_SESSIONS', 'false').lower() == 'true'
            and not is_cluster_enabled()):
        logger.info('Enabling sticky sessions')
        app_config['com.mendix.core.SessionIdCookieName'] = 'JSESSIONID'
    mxruntime_config.update(app_config)
    mxruntime_config.update(buildpackutil.get_database_config(
        development_mode=is_development_mode(),
    ))
    mxruntime_config.update(get_filestore_config(m2ee))
    mxruntime_config.update(get_cluster_config())
    mxruntime_config.update(get_certificate_authorities())
    mxruntime_config.update(get_custom_settings(metadata, mxruntime_config))
    # BUG FIX: os.environ.iteritems() only exists on Python 2; items()
    # works on both Python 2 and 3.
    for k, v in os.environ.items():
        if k.startswith('MXRUNTIME_'):
            mxruntime_config[
                k.replace('MXRUNTIME_', '', 1).replace('_', '.')
            ] = v
def parse_headers():
    """Build the nginx add_header configuration from the environment.

    Combines the legacy X_FRAME_OPTIONS variable with the
    HTTP_RESPONSE_HEADERS JSON object. Header values are validated
    against the DEFAULT_HEADERS regex table; invalid or unknown headers
    are skipped with a warning.

    Raises:
        ValueError: when HTTP_RESPONSE_HEADERS is not valid JSON.
    """
    header_config = ""
    headers_from_json = {}

    # this is kept for X-Frame-Options backward compatibility
    x_frame_options = os.environ.get("X_FRAME_OPTIONS", "ALLOW")
    if x_frame_options != "ALLOW":
        headers_from_json["X-Frame-Options"] = x_frame_options

    headers_json = os.environ.get("HTTP_RESPONSE_HEADERS", "{}")
    try:
        headers_from_json.update(json.loads(headers_json))
    except Exception:
        logger.error(
            "Failed to parse HTTP_RESPONSE_HEADERS, due to invalid JSON string: '{}'".format(
                headers_json
            ),
            exc_info=True,
        )
        raise

    for header_key, header_value in headers_from_json.items():
        # BUG FIX: an unknown header previously raised KeyError and
        # crashed the deploy; .get() routes it into the skip-with-warning
        # branch instead.
        regEx = DEFAULT_HEADERS.get(header_key)
        if regEx and re.match(regEx, header_value):
            # Escape quotes so the value can be embedded in nginx config.
            escaped_value = header_value.replace('"', '\\"').replace(
                "'", "\\'"
            )
            header_config += "add_header {} '{}';\n".format(
                header_key, escaped_value
            )
            logger.debug("Added header {} to nginx config".format(header_key))
        else:
            logger.warning(
                "Skipping {} config, value '{}' is not valid".format(
                    header_key, header_value
                )
            )
    return header_config
def do_POST(self):
    """Handle an MPK upload over HTTP: store the file, build it with
    MxBuild, and restart or reload the running app as needed.

    Responds 200 with the resulting state, 401 when no MPK file was part
    of the request, and 500 on unexpected errors.
    """
    try:
        # Parse the multipart form data from the request body.
        form = cgi.FieldStorage(fp=self.rfile, headers=self.headers,
                                environ={
                                    'REQUEST_METHOD': 'POST',
                                    'CONTENT_TYPE': self.headers['Content-Type'],
                                })
        if 'file' in form:
            # Overwrite the deployment package on disk with the upload.
            with open(MPK_FILE, 'wb') as output:
                shutil.copyfileobj(form['file'].file, output)
            mxbuild_response = build()
            logger.debug(mxbuild_response)
            if mxbuild_response['status'] != 'Success':
                logger.warning('Failed to build project, '
                               'keeping previous model running')
                state = 'FAILED'
            elif mxbuild_response['restartRequired'] is True:
                # Structural model changes require a full runtime restart.
                logger.info('Restarting app after MPK push')
                self.server.restart_callback()
                state = 'STARTED'
            else:
                # Otherwise a lightweight model reload is sufficient.
                logger.info('Reloading model after MPK push')
                self.server.reload_callback()
                state = 'STARTED'
            return self._terminate(200, {
                'state': state,
            }, mxbuild_response)
        else:
            return self._terminate(401, {
                'state': 'FAILED',
                'errordetails': 'No MPK found',
            })
    except Exception:
        # Report any unexpected failure back to the caller with details.
        return self._terminate(500, {
            'state': 'FAILED',
            'errordetails': traceback.format_exc(),
        })
def _get_swift_specific_config(vcap_services, m2ee): if "Object-Storage" not in vcap_services: return None if m2ee.config.get_runtime_version() < 6.7: logger.warning("Can not configure Object Storage with Mendix < 6.7") return None creds = vcap_services["Object-Storage"][0]["credentials"] container_name = os.getenv("SWIFT_CONTAINER_NAME", "mendix") return { "com.mendix.core.StorageService": "com.mendix.storage.swift", "com.mendix.storage.swift.Container": container_name, "com.mendix.storage.swift.Container.AutoCreate": True, "com.mendix.storage.swift.credentials.DomainId": creds["domainId"], "com.mendix.storage.swift.credentials.Authurl": creds["auth_url"], "com.mendix.storage.swift.credentials.Username": creds["username"], "com.mendix.storage.swift.credentials.Password": creds["password"], "com.mendix.storage.swift.credentials.Region": creds["region"], }
def _get_swift_specific_config(vcap_services, m2ee): if 'Object-Storage' not in vcap_services: return None if m2ee.config.get_runtime_version() < 6.7: logger.warning('Can not configure Object Storage with Mendix < 6.7') return None creds = vcap_services['Object-Storage'][0]['credentials'] container_name = os.getenv('SWIFT_CONTAINER_NAME', 'mendix') return { 'com.mendix.core.StorageService': 'com.mendix.storage.swift', 'com.mendix.storage.swift.Container': container_name, 'com.mendix.storage.swift.Container.AutoCreate': True, 'com.mendix.storage.swift.credentials.DomainId': creds['domainId'], 'com.mendix.storage.swift.credentials.Authurl': creds['auth_url'], 'com.mendix.storage.swift.credentials.Username': creds['username'], 'com.mendix.storage.swift.credentials.Password': creds['password'], 'com.mendix.storage.swift.credentials.Region': creds['region'], }
def process_request(self):
    """Handle an instadeploy MPK push: save the upload, rebuild with
    MxBuild, and restart or reload the running app.

    Returns:
        A (status_code, body_dict, mxbuild_response) tuple for the HTTP
        layer to serialize; mxbuild_response may be None.
    """
    try:
        form = MyFieldStorage(
            fp=self.rfile,
            headers=self.headers,
            environ={
                "REQUEST_METHOD": "POST",
                "CONTENT_TYPE": self.headers["Content-Type"],
            },
        )
        if "file" in form:
            # Replace the deployment package on disk with the upload.
            with open(MPK_FILE, "wb") as output:
                shutil.copyfileobj(form["file"].file, output)
            update_project_dir()
            mxbuild_response = build()
            logger.debug(mxbuild_response)
            if mxbuild_response["status"] == "Busy":
                # A build is already in progress; tell the caller to retry.
                return (200, {"state": "BUSY"}, mxbuild_response)
            if mxbuild_response["status"] != "Success":
                # possible 'status': Success, Failure, Busy
                logger.warning("Failed to build project, "
                               "keeping previous model running")
                state = "FAILED"
            elif mxbuild_response["restartRequired"] is True:
                # Structural changes require a full runtime restart.
                logger.info("Restarting app after MPK push")
                self.server.restart_callback()
                state = "STARTED"
            else:
                logger.info("Reloading model after MPK push")
                self.server.reload_callback()
                state = "STARTED"
            return (200, {"state": state}, mxbuild_response)
        else:
            return (
                401,
                {
                    "state": "FAILED",
                    "errordetails": "No MPK found"
                },
                None,
            )
    except MxBuildFailure as mbf:
        # MxBuild reported a structured failure; surface it as FAILED.
        logger.warning(
            "InstaDeploy terminating with MxBuildFailure: {}".format(
                mbf.message))
        return (200, {"state": "FAILED"}, mbf.mxbuild_response)
    except Exception:
        logger.warning("Instadeploy failed", exc_info=True)
        return (
            500,
            {
                "state": "FAILED",
                "errordetails": traceback.format_exc()
            },
            None,
        )
def activate_license():
    """Write a Java preferences file containing the Mendix license so the
    runtime picks it up on start.

    License id/key come from (FORCED_)LICENSE_ID / (FORCED_)LICENSE_KEY;
    the deprecated SERVER_ID is still honoured as a fallback id. When no
    complete license is supplied, nothing is written.
    """
    prefs_dir = os.path.expanduser('~/../.java/.userPrefs/com/mendix/core')
    buildpackutil.mkdir_p(prefs_dir)

    template = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!DOCTYPE map SYSTEM "http://java.sun.com/dtd/preferences.dtd">
<map MAP_XML_VERSION="1.0">
  <entry key="id" value="{{LICENSE_ID}}"/>
  <entry key="license_key" value="{{LICENSE_KEY}}"/>
</map>"""

    key = os.environ.get(
        'FORCED_LICENSE_KEY', os.environ.get('LICENSE_KEY', None)
    )
    legacy_server_id = os.environ.get(
        'FORCED_SERVER_ID', os.environ.get('SERVER_ID', None)
    )
    lic_id = os.environ.get(
        'FORCED_LICENSE_ID', os.environ.get('LICENSE_ID', None)
    )
    if legacy_server_id:
        logger.warning('SERVER_ID is deprecated, please use LICENSE_ID instead')
        if not lic_id:
            lic_id = legacy_server_id

    if key is not None and lic_id is not None:
        logger.debug('A license was supplied so going to activate it')
        body = template.replace('{{LICENSE_ID}}', lic_id)
        body = body.replace('{{LICENSE_KEY}}', key)
        with open(os.path.join(prefs_dir, 'prefs.xml'), 'w') as prefs_file:
            prefs_file.write(body)
def run(self):
    """Keep the mendix-logfilter subprocess alive indefinitely.

    The filter is restarted whenever it exits; a crash of this loop
    itself is logged as a fatal logging-pipeline failure.
    """
    try:
        while True:
            filter_proc = subprocess.Popen(
                [
                    "./bin/mendix-logfilter",
                    "-r",
                    self.log_ratelimit,
                    "-f",
                    "log/out.log",
                ]
            )
            filter_proc.wait()
            logger.warning(
                "MENDIX LOGGING: Mendix logfilter crashed with return code "
                "%s. This is nothing to worry about, we are restarting the "
                "logfilter now.",
                filter_proc.returncode,
            )
    except Exception:
        logger.warning(
            "MENDIX LOGGING: Logging pipeline failed completely. To "
            "restore log availibility, restart your application.",
            exc_info=True,
        )
def get_constants(metadata):
    """Resolve values for all microflow constants declared in metadata.

    Precedence per constant: the MX_<Name, dots as underscores> env var,
    then the CONSTANTS JSON env var, then the model's DefaultValue.
    Integer-typed constants are cast to int.
    """
    constants = {}
    constants_from_json = {}
    constants_json = os.environ.get('CONSTANTS',
                                    json.dumps(constants_from_json))
    try:
        constants_from_json = json.loads(constants_json)
    except Exception as e:
        logger.warning('Failed to parse CONSTANTS: ' + str(e))
    for constant in metadata['Constants']:
        constant_name = constant['Name']
        env_name = 'MX_%s' % constant_name.replace('.', '_')
        value = os.environ.get(env_name,
                               constants_from_json.get(constant_name))
        if value is None:
            value = constant['DefaultValue']
            # BUG FIX: the message claimed to print the default value but
            # actually printed the constant name; now it logs both.
            logger.debug('Constant %s not found in environment, taking '
                         'default value %s' % (constant_name, value))
        if constant['Type'] == 'Integer':
            value = int(value)
        constants[constant_name] = value
    return constants
def run(runtime_version):
    """Start the Datadog agent and process agent, when enabled and
    supported.

    Args:
        runtime_version: Mendix runtime version; Datadog requires >= 7.14.
    """
    if not is_enabled():
        return
    if runtime_version < 7.14:
        logger.warning(
            "Datadog integration requires Mendix 7.14 or newer. The Datadog agent is not enabled."
        )
        # BUG FIX: previously fell through and started the agent anyway,
        # contradicting the warning above.
        return
    if not _is_installed():
        # BUG FIX: adjacent literals "isn" "t" produced "isnt"; also
        # logger.warn is a deprecated alias of logger.warning.
        logger.warning(
            "DataDog agent isn't installed yet but DD_API_KEY is set. "
            "Please push or restage your app to complete DataDog installation."
        )
        return
    agent_env = dict(os.environ)
    agent_env["DD_HOSTNAME"] = buildpackutil.get_hostname()
    agent_env["DD_API_KEY"] = get_api_key()
    agent_env["LD_LIBRARY_PATH"] = os.path.abspath(".local/datadog/lib/")
    subprocess.Popen(
        (".local/datadog/datadog-agent", "-c", ".local/datadog", "start"),
        env=agent_env,
    )
    # after datadog agent 6.3 is released, a separate process agent might
    # not be necessary any more: https://github.com/DataDog/datadog-process-agent/pull/124
    subprocess.Popen(
        (
            ".local/datadog/process-agent",
            "-logtostderr",
            "-config",
            ".local/datadog/datadog.yaml",
        ),
        env=agent_env,
    )
def build():
    """Ask the local MxBuild daemon to build the project.

    On HTTP success the project files are synced and instadeploy feedback
    is sent to the Cloud Portal (best effort). Raises MxBuildFailure on a
    non-OK HTTP status.
    """
    mpr = os.path.abspath(buildpackutil.get_mpr_file_from_dir(PROJECT_DIR))
    payload = {
        "target": "Deploy",
        "projectFilePath": mpr,
        "forceFullDeployment": False,
    }
    response = requests.post(
        "http://localhost:6666/build",
        data=json.dumps(payload),
        headers={"Content-Type": "application/json"},
        timeout=120,
    )
    if response.status_code != requests.codes.ok:
        raise MxBuildFailure(
            "MxBuild failure", response.status_code, response.json()
        )

    result = response.json()
    if result["status"] != "Success":
        logger.warning("Not syncing project files. MxBuild result: %s", result)
        return result

    try:
        sync_project_files()
        logger.info("Syncing project files ...")
    except Exception:
        logger.warning(
            "Syncing project files failed: %s", traceback.format_exc()
        )
        raise
    # Feedback to the portal is best effort and must not fail the build.
    try:
        send_metadata_to_cloudportal()
    except Exception:
        logger.warning(
            "Failed to send instadeploy feedback to Cloud Portal",
            exc_info=True,
        )
    return result
def start_app(m2ee):
    """Start the appcontainer and runtime, retrying until the runtime is
    fully started or a fatal start error occurs (then sys.exit(1)).

    Handles the m2ee start result codes, including running DDL commands
    on the primary instance when the database is out of sync (code 3).
    """
    m2ee.start_appcontainer()
    if not m2ee.send_runtime_config():
        sys.exit(1)
    logger.debug("Appcontainer has been started")
    abort = False
    success = False
    while not (success or abort):
        startresponse = m2ee.client.start({"autocreatedb": True})
        logger.debug("startresponse received")
        result = startresponse.get_result()
        if result == 0:
            success = True
            logger.info("The MxRuntime is fully started now.")
        else:
            startresponse.display_error()
            if result == 2:
                # Database does not exist and could not be created.
                logger.warning("DB does not exists")
                abort = True
            elif result == 3:
                # Database structure is out of sync with the model.
                if i_am_primary_instance():
                    if os.getenv("SHOW_DDL_COMMANDS", "").lower() == "true":
                        for line in m2ee.client.get_ddl_commands(
                            {"verbose": True}
                        ).get_feedback()["ddl_commands"]:
                            logger.info(line)
                    m2eeresponse = m2ee.client.execute_ddl_commands()
                    if m2eeresponse.has_error():
                        m2eeresponse.display_error()
                        abort = True
                else:
                    # Secondary instances wait for the primary to migrate,
                    # then retry the start.
                    logger.info(
                        "waiting 10 seconds before primary instance "
                        "synchronizes database"
                    )
                    time.sleep(10)
            elif result == 4:
                logger.warning("Not enough constants!")
                abort = True
            elif result == 5:
                logger.warning("Unsafe password!")
                abort = True
            elif result == 6:
                logger.warning("Invalid state!")
                abort = True
            elif result == 7 or result == 8 or result == 9:
                logger.warning(
                    "You'll have to fix the configuration and run start "
                    "again... (or ask for help..)"
                )
                abort = True
            else:
                # Unknown result code: treat as fatal.
                abort = True
    if abort:
        logger.warning("start failed, stopping")
        sys.exit(1)
def service_backups():
    """Register this app's file store and database credentials with the
    schnapps backup service, if one is bound.

    Best effort: any failure is logged and the function returns without
    raising, so backups never block a deploy.
    """
    vcap_services = buildpackutil.get_vcap_services_data()
    schnapps = None
    amazon_s3 = None
    # Service names may carry plan suffixes, so match by prefix.
    # NOTE(review): assumes get_vcap_services_data() returns a dict
    # (possibly empty), never None — confirm, since iteration precedes
    # the emptiness check below.
    for key in vcap_services:
        if key.startswith("amazon-s3"):
            amazon_s3 = key
        if key.startswith("schnapps"):
            schnapps = key
    if not vcap_services or schnapps not in vcap_services:
        logger.debug("No backup service detected")
        return

    backup_service = {}
    if amazon_s3 in vcap_services:
        s3_credentials = vcap_services[amazon_s3][0]["credentials"]
        backup_service["filesCredentials"] = {
            "accessKey": s3_credentials["access_key_id"],
            "secretKey": s3_credentials["secret_access_key"],
            "bucketName": s3_credentials["bucket"],
        }
        if "key_suffix" in s3_credentials:  # Not all s3 plans have this field
            backup_service["filesCredentials"]["keySuffix"] = s3_credentials[
                "key_suffix"
            ]

    try:
        db_config = buildpackutil.get_database_config()
        if db_config["DatabaseType"] != "PostgreSQL":
            raise Exception(
                "Schnapps only supports postgresql, not %s"
                % db_config["DatabaseType"]
            )
        host_and_port = db_config["DatabaseHost"].split(":")
        backup_service["databaseCredentials"] = {
            "host": host_and_port[0],
            "username": db_config["DatabaseUserName"],
            "password": db_config["DatabasePassword"],
            "dbname": db_config["DatabaseName"],
            # Default postgres port when the host has no explicit port.
            "port": int(host_and_port[1]) if len(host_and_port) > 1 else 5432,
        }
    except Exception as e:
        logger.exception(
            "Schnapps will not be activated because error occurred with "
            "parsing the database credentials"
        )
        return

    schnapps_url = vcap_services[schnapps][0]["credentials"]["url"]
    schnapps_api_key = vcap_services[schnapps][0]["credentials"]["apiKey"]
    try:
        result = requests.put(
            schnapps_url,
            headers={
                "Content-Type": "application/json",
                "apiKey": schnapps_api_key,
            },
            data=json.dumps(backup_service),
        )
    except requests.exceptions.SSLError as e:
        # NOTE(review): this log string was reconstructed from a garbled
        # line break in the original source.
        logger.warning("Failed to contact backup service. "
                       "SSLError: " + str(e))
        return
    except Exception as e:
        logger.warning("Failed to contact backup service: ", exc_info=True)
        return
    if result.status_code == 200:
        logger.info("Successfully updated backup service")
    else:
        logger.warning("Failed to update backup service: " + result.text)
def _handle_all(self):
    """Answer every request with 503 plus the maintenance banner."""
    logger.warning(MAINTENANCE_MESSAGE)
    body = MAINTENANCE_MESSAGE.encode("utf-8")
    self.send_response(503)
    # Marker header so monitoring can tell maintenance mode from outages.
    self.send_header("X-Mendix-Cloud-Mode", "maintenance")
    self.end_headers()
    self.wfile.write(body)
def loop_until_process_dies(m2ee):
    """Block while the runtime process is alive; exit(1) once it dies.

    Polls the pid every 10 seconds via the m2ee runner.
    """
    while m2ee.runner.check_pid():
        time.sleep(10)
    logger.info('process died, stopping')
    sys.exit(1)


def am_i_primary_instance():
    # Instance 0 is the primary; it runs scheduled events and DB sync.
    return os.getenv('CF_INSTANCE_INDEX', '0') == '0'


if __name__ == '__main__':
    if os.getenv('CF_INSTANCE_INDEX') is None:
        # Without an instance index we must act as the only instance.
        logger.warning(
            'CF_INSTANCE_INDEX environment variable not found. Assuming '
            'responsibility for scheduled events execution and database '
            'synchronization commands.'
        )
    pre_process_m2ee_yaml()
    activate_license()
    set_up_logging_file()
    m2ee = set_up_m2ee_client(get_vcap_data())

    def sigterm_handler(_signo, _stack_frame):
        # Graceful shutdown on cf stop / restart.
        m2ee.stop()
        sys.exit(0)

    signal.signal(signal.SIGTERM, sigterm_handler)
    service_backups()
    start_app(m2ee)
def complete_start_procedure_safe_to_use_for_restart(m2ee): buildpackutil.mkdir_p('model/lib/userlib') set_up_logging_file() start_app(m2ee) create_admin_user(m2ee) configure_logging(m2ee) display_running_version(m2ee) configure_debugger(m2ee) if __name__ == '__main__': if os.getenv('CF_INSTANCE_INDEX') is None: logger.warning( 'CF_INSTANCE_INDEX environment variable not found. Assuming ' 'responsibility for scheduled events execution and database ' 'synchronization commands.') pre_process_m2ee_yaml() activate_license() m2ee = set_up_m2ee_client(get_vcap_data()) def sigterm_handler(_signo, _stack_frame): m2ee.stop() sys.exit(0) signal.signal(signal.SIGTERM, sigterm_handler) try: service_backups() set_up_nginx_files(m2ee) complete_start_procedure_safe_to_use_for_restart(m2ee)
def do_GET(self):
    # Every HTTP verb receives the same maintenance response.
    self._handle_all()

def do_POST(self):
    self._handle_all()

def do_PUT(self):
    self._handle_all()

def do_HEAD(self):
    self._handle_all()


if os.environ.get("DEBUG_CONTAINER", "false").lower() == "true":
    # Debug mode: serve the maintenance page instead of starting the app.
    # NOTE: serve_forever() never returns, so nothing below runs in this
    # mode.
    logger.warning(MAINTENANCE_MESSAGE)
    port = int(os.environ.get("PORT", 8080))
    httpd = HTTPServer(("", port), Maintenance)
    httpd.serve_forever()


def emit(**stats):
    """Print one metrics line to stdout for the loggregator pipeline."""
    stats["version"] = "1.0"
    stats["timestamp"] = datetime.datetime.now().isoformat()
    logger.info("MENDIX-METRICS: " + json.dumps(stats))


def get_nginx_port():
    # PORT is provided by Cloud Foundry.
    return int(os.environ["PORT"])
def get_filestore_config(m2ee):
    """Derive S3-compatible file store settings from bound services and
    environment overrides.

    Credentials come from an amazon-s3 or p-riakcs service binding; any
    S3_* environment variable overrides the service value. Returns {}
    (with a warning) when no complete credential set is available.
    """
    access_key = secret = bucket = encryption_keys = key_suffix = None
    vcap_services = buildpackutil.get_vcap_services_data()
    endpoint = None
    v2_auth = ''

    if vcap_services and 'amazon-s3' in vcap_services:
        _conf = vcap_services['amazon-s3'][0]['credentials']
        access_key = _conf['access_key_id']
        secret = _conf['secret_access_key']
        bucket = _conf['bucket']
        if 'encryption_keys' in _conf:
            encryption_keys = _conf['encryption_keys']
        if 'key_suffix' in _conf:
            key_suffix = _conf['key_suffix']
    elif vcap_services and 'p-riakcs' in vcap_services:
        _conf = vcap_services['p-riakcs'][0]['credentials']
        access_key = _conf['access_key_id']
        secret = _conf['secret_access_key']
        # riakcs hands us one URI; split it into endpoint and bucket.
        # NOTE(review): assumes the uri always matches this pattern —
        # match would be None otherwise and raise below.
        pattern = r'https://(([^:]+):([^@]+)@)?([^/]+)/(.*)'
        match = re.search(pattern, _conf['uri'])
        endpoint = 'https://' + match.group(4)
        bucket = match.group(5)
        v2_auth = 'true'

    # Environment variables override service-provided credentials.
    access_key = os.getenv('S3_ACCESS_KEY_ID', access_key)
    secret = os.getenv('S3_SECRET_ACCESS_KEY', secret)
    bucket = os.getenv('S3_BUCKET_NAME', bucket)
    if 'S3_ENCRYPTION_KEYS' in os.environ:
        encryption_keys = json.loads(os.getenv('S3_ENCRYPTION_KEYS'))
    # BUG FIX: the test was inverted ('== false'), which disabled deletes
    # by default and enabled them when the user asked to disable them.
    # Deletes are enabled unless S3_PERFORM_DELETES is explicitly 'false'.
    perform_deletes = os.getenv('S3_PERFORM_DELETES', '').lower() != 'false'
    key_suffix = os.getenv('S3_KEY_SUFFIX', key_suffix)
    endpoint = os.getenv('S3_ENDPOINT', endpoint)
    v2_auth = os.getenv('S3_USE_V2_AUTH', v2_auth).lower() == 'true'
    sse = os.getenv('S3_USE_SSE', '').lower() == 'true'

    if not (access_key and secret and bucket):
        logger.warning(
            'External file store not configured, uploaded files in the app '
            'will not persist across restarts. See https://github.com/mendix/'
            'cf-mendix-buildpack for file store configuration details.'
        )
        return {}
    logger.info(
        'S3 config detected, activating external file store'
    )
    config = {
        'com.mendix.core.StorageService': 'com.mendix.storage.s3',
        'com.mendix.storage.s3.AccessKeyId': access_key,
        'com.mendix.storage.s3.SecretAccessKey': secret,
        'com.mendix.storage.s3.BucketName': bucket,
    }
    if not perform_deletes:
        config['com.mendix.storage.s3.PerformDeleteFromStorage'] = False
    if key_suffix:
        config['com.mendix.storage.s3.ResourceNameSuffix'] = key_suffix
    if v2_auth:
        config['com.mendix.storage.s3.UseV2Auth'] = v2_auth
    if endpoint:
        config['com.mendix.storage.s3.EndPoint'] = endpoint
    # Encryption and SSE settings are only understood by newer runtimes.
    if m2ee.config.get_runtime_version() >= 5.17 and encryption_keys:
        config['com.mendix.storage.s3.EncryptionKeys'] = encryption_keys
    if m2ee.config.get_runtime_version() >= 6 and sse:
        config['com.mendix.storage.s3.UseSSE'] = sse
    return config
def init(self):
    """Parse self.url (a database URI or a JDBC connection string) into
    Mendix runtime database settings and store them on self.m2ee_config.

    Also merges extra JDBC query parameters from the URL itself and from
    the DATABASE_CONNECTION_PARAMS environment value (JSON object), and
    enables SSL in the m2ee config when sslmode=require is present.

    Raises:
        Exception: if the URI cannot be parsed or names an unsupported
            database type.
    """
    patterns = [
        r"(?P<type>[a-zA-Z0-9]+)://(?P<user>[^:]+):(?P<password>[^@]+)@(?P<host>[^/]+)/(?P<dbname>[^?]*)(?P<extra>\?.*)?",  # noqa: E501
        r"jdbc:(?P<type>[a-zA-Z0-9]+)://(?P<host>[^;]+);database=(?P<dbname>[^;]*);user=(?P<user>[^;]+);password=(?P<password>.*)$",  # noqa: E501
    ]

    supported_databases = {
        "postgres": "PostgreSQL",
        "postgresql": "PostgreSQL",
        "mysql": "MySQL",
        "db2": "Db2",
        "sqlserver": "SQLSERVER",
    }

    for pattern in patterns:
        match = re.search(pattern, self.url)
        if match is not None:
            break
    else:
        raise Exception(
            "Could not parse database credentials from database uri %s"
            % self.url
        )

    database_type_input = match.group("type")
    if database_type_input not in supported_databases:
        # BUG FIX: the message was built with a stray comma (logging-style
        # args), so the %s placeholder was never substituted; use real
        # %-formatting so the offending type shows up in the message.
        raise Exception(
            "Unknown database type: %s" % database_type_input
        )
    database_type = supported_databases[database_type_input]

    config = {
        "DatabaseType": database_type,
        "DatabaseUserName": match.group("user"),
        "DatabasePassword": match.group("password"),
        "DatabaseHost": match.group("host"),
        "DatabaseName": match.group("dbname"),
    }

    # parsing additional parameters
    # 1) check for sslmode in existing jdbc url for m2ee config
    # 2) update jdbc url (from vcap) with input from
    #    DATABASE_CONNECTION_PARAMS
    jdbc_params = {}

    # getting values from url (only the first pattern has an 'extra' group
    # with content)
    has_extra = "extra" in match.groupdict() and match.group("extra")
    if has_extra:
        extra = match.group("extra").lstrip("?")
        jdbc_params = parse_qs(extra)

    # defaults
    if database_type == "PostgreSQL":
        jdbc_params.update({"tcpKeepAlive": "true"})

    extra_url_params_str = self.env_vars.get(
        "DATABASE_CONNECTION_PARAMS", "{}"
    )
    if extra_url_params_str is not None:
        try:
            extra_url_params = json.loads(extra_url_params_str)
            jdbc_params.update(extra_url_params)
        except Exception:
            # best-effort: a bad override must not block startup
            logger.warning(
                "Invalid JSON string for DATABASE_CONNECTION_PARAMS"
            )

    # generate jdbc_url, might be None
    jdbc_url = self.get_jdbc_strings(self.url, match, config, jdbc_params)
    if jdbc_url is not None:
        logger.debug("Setting JDBC url: {}".format(jdbc_url))
        config.update({"DatabaseJdbcUrl": jdbc_url})

    # parse_qs yields lists, hence the sslmode[0] access
    if "sslmode" in jdbc_params:
        sslmode = jdbc_params["sslmode"]
        if sslmode and sslmode[0] == "require":
            config.update({"DatabaseUseSsl": True})

    self.m2ee_config = config
def get_m2ee_password(): m2ee_password = os.getenv('M2EE_PASSWORD', get_admin_password()) if not m2ee_password: logger.warning('No M2EE_PASSWORD set, generating a random password for protection') m2ee_password = default_m2ee_password return m2ee_password
def service_backups():
    """Register database and file-store credentials with the Schnapps
    backup service when a 'schnapps' binding exists in VCAP_SERVICES.

    Best-effort: every failure is logged and the function simply returns,
    so backup configuration can never block application startup.
    """
    vcap_services = buildpackutil.get_vcap_services_data()
    # BUG FIX: guard before iterating — vcap_services may be None, and the
    # previous code looped over it before checking, raising a TypeError.
    if not vcap_services:
        logger.debug("No backup service detected")
        return
    schnapps = None
    amazon_s3 = None
    for key in vcap_services:
        if key.startswith("amazon-s3"):
            amazon_s3 = key
        if key.startswith("schnapps"):
            schnapps = key
    if schnapps not in vcap_services:
        logger.debug("No backup service detected")
        return
    backup_service = {}
    if amazon_s3 in vcap_services:
        s3_credentials = vcap_services[amazon_s3][0]['credentials']
        backup_service['filesCredentials'] = {
            'accessKey': s3_credentials['access_key_id'],
            'secretKey': s3_credentials['secret_access_key'],
            'bucketName': s3_credentials['bucket'],
        }
        if 'key_suffix' in s3_credentials:  # Not all s3 plans have this field
            backup_service['filesCredentials']['keySuffix'] = s3_credentials[
                'key_suffix']
    try:
        db_config = buildpackutil.get_database_config()
        if db_config['DatabaseType'] != 'PostgreSQL':
            raise Exception('Schnapps only supports postgresql, not %s'
                            % db_config['DatabaseType'])
        host_and_port = db_config['DatabaseHost'].split(':')
        backup_service['databaseCredentials'] = {
            'host': host_and_port[0],
            'username': db_config['DatabaseUserName'],
            'password': db_config['DatabasePassword'],
            'dbname': db_config['DatabaseName'],
            # default postgres port when the host carries no explicit port
            'port': int(host_and_port[1]) if len(host_and_port) > 1 else 5432,
        }
    except Exception:
        logger.exception(
            'Schnapps will not be activated because error occurred with '
            'parsing the database credentials')
        return
    schnapps_url = vcap_services[schnapps][0]['credentials']['url']
    schnapps_api_key = vcap_services[schnapps][0]['credentials']['apiKey']
    try:
        result = requests.put(
            schnapps_url,
            headers={
                'Content-Type': 'application/json',
                'apiKey': schnapps_api_key
            },
            data=json.dumps(backup_service),
        )
    except requests.exceptions.SSLError as e:
        logger.warning('Failed to contact backup service. SSLError: ' + str(e))
        return
    except Exception as e:
        # BUG FIX: concatenating str + Exception raised a TypeError inside
        # the handler itself; convert the exception to str first.
        logger.warning('Failed to contact backup service: ' + str(e))
        return
    if result.status_code == 200:
        logger.info("Successfully updated backup service")
    else:
        logger.warning("Failed to update backup service: " + result.text)
def start_app(m2ee): m2ee.start_appcontainer() if not m2ee.send_runtime_config(): sys.exit(1) logger.debug('Appcontainer has been started') abort = False success = False while not (success or abort): startresponse = m2ee.client.start({'autocreatedb': True}) result = startresponse.get_result() if result == 0: success = True logger.info('The MxRuntime is fully started now.') else: startresponse.display_error() if result == 2: logger.warning('DB does not exists') abort = True elif result == 3: m2eeresponse = m2ee.client.execute_ddl_commands() m2eeresponse.display_error() elif result == 4: logger.warning('Not enough constants!') abort = True elif result == 5: logger.warning('Unsafe password!') abort = True elif result == 6: logger.warning('Invalid state!') abort = True elif result == 7 or result == 8 or result == 9: logger.warning( "You'll have to fix the configuration and run start " "again... (or ask for help..)" ) abort = True else: abort = True if abort: logger.warning('start failed, stopping') sys.exit(1)