def update_project_dir():
    """Unpack MPK_FILE and sync its contents into PROJECT_DIR.

    Flow: wipe and re-create INCOMING_MPK_DIR, unzip the MPK there, then
    rsync twice — incoming -> intermediate (checksum + delete, so the
    intermediate dir mirrors the archive exactly), and
    intermediate -> project. The second rsync uses ``--update`` when the
    project's .mpr file name is unchanged (keep newer local files) and
    ``--delete`` when the model file changed (replace the project tree).

    Side effects only; no return value. Raises CalledProcessError if the
    rm/unzip preparation steps fail (the rsync calls deliberately use
    ``subprocess.call`` and ignore their exit status).
    """
    logging.debug("unzipping " + MPK_FILE + " to " + INCOMING_MPK_DIR)
    subprocess.check_call(("rm", "-rf", INCOMING_MPK_DIR))
    util.mkdir_p(INCOMING_MPK_DIR)
    subprocess.check_call(("unzip", "-oqq", MPK_FILE, "-d", INCOMING_MPK_DIR))

    new_mpr = os.path.basename(util.get_mpr_file_from_dir(INCOMING_MPK_DIR))
    existing_mpr_path = util.get_mpr_file_from_dir(PROJECT_DIR)
    if existing_mpr_path:
        existing_mpr = os.path.basename(existing_mpr_path)
    else:
        # No model file yet (first deploy of this container)
        existing_mpr = None

    logging.debug("rsync from incoming to intermediate")
    if util.get_buildpack_loglevel() < logging.INFO:
        quiet_or_verbose = "--verbose"
    else:
        quiet_or_verbose = "--quiet"
    subprocess.call((
        "rsync",
        "--recursive",
        "--checksum",
        "--delete",
        # BUGFIX: the verbosity flag was computed just above for this call
        # but was only ever passed to the second rsync below.
        quiet_or_verbose,
        INCOMING_MPK_DIR + "/",
        INTERMEDIATE_MPK_DIR + "/",
    ))

    logging.debug("rsync from intermediate to project")
    if new_mpr == existing_mpr:
        update_or_delete = "--update"
    else:
        update_or_delete = "--delete"
    subprocess.call((
        "rsync",
        "--recursive",
        update_or_delete,
        quiet_or_verbose,
        INTERMEDIATE_MPK_DIR + "/",
        PROJECT_DIR + "/",
    ))
break  # NOTE(review): tail of a monitoring loop whose start is outside this chunk
# The watched process is gone: record a JVM crash metric and exit non-zero.
emit(jvm={"crash": 1.0})
logging.info("process died, stopping")
sys.exit(1)


def emit(**stats):
    """Log the given stats as a single MENDIX-METRICS JSON line.

    Adds a schema ``version`` and an ISO-8601 ``timestamp`` (local time,
    naive datetime) before serializing.
    """
    stats["version"] = "1.0"
    stats["timestamp"] = datetime.datetime.now().isoformat()
    logging.info("MENDIX-METRICS: %s", json.dumps(stats))


if __name__ == "__main__":
    logging.basicConfig(
        level=util.get_buildpack_loglevel(),
        stream=sys.stdout,
        format="%(levelname)s: %(message)s",
    )
    commit = util.get_current_buildpack_commit()
    if commit == "unknown_commit":
        # Best-effort: commit lookup failed; log the traceback at debug level
        logging.debug("Failed to read file", exc_info=True)
    logging.info(
        "Started Mendix Cloud Foundry Buildpack v%s [commit:%s]",
        BUILDPACK_VERSION,
        commit,
    )
    try:
        if os.getenv("CF_INSTANCE_INDEX") is None:
            # NOTE(review): the try-body continues beyond this chunk
import backoff
from buildpack import util
from buildpack.runtime_components import (
    backup,
    database,
    logs,
    metrics,
    security,
    storage,
)
from lib.m2ee import M2EE as m2ee_class
from lib.m2ee import logger
from lib.m2ee.version import MXVersion

# Align the m2ee library's log level with the buildpack's configured level
logger.setLevel(util.get_buildpack_loglevel())

# Disable duplicate log lines for M2EE: keep at most the first two handlers
# and stop propagation to the root logger.
handlers = logging.getLogger("m2ee").handlers
if len(handlers) > 2:
    logging.getLogger("m2ee").handlers = handlers[:2]
logging.getLogger("m2ee").propagate = False


def check_deprecation(version):
    """Return False for Mendix versions in the 5.x range, True otherwise.

    NOTE(review): the name suggests True means "version is acceptable /
    not deprecated" (i.e. 5.x is rejected) — confirm against the caller.
    """
    if version >= MXVersion("5.0.0") and version < MXVersion("6.0.0"):
        return False
    return True
def load_config(configs, database_config, parameters_replacement):
    """Build one merged OmegaConf config from files, env vars and overrides.

    Merge order (later sources win): JSON file contents, then
    ``database_config``, whitelisted RUNTIME/BROKER env vars, constants
    passed as env vars, and finally ``parameters_replacement`` pairs.

    :param configs: iterable of open file-like objects containing JSON;
        each must expose ``.name`` and ``.read()``.
    :param database_config: optional mapping merged into the env-var layer.
    :param parameters_replacement: iterable of (dotted_key, value) pairs
        applied last as explicit overrides.
    :return: the merged OmegaConf configuration.
    :raises Exception: with a descriptive message if a file fails to parse
        or the final merge/lookup fails.
    """
    loaded_json = []
    for config in configs:
        try:
            tmp_json = json.loads(config.read())
        except Exception as exception:
            raise Exception(
                "Error loading input file called {}. Reason: '{}'".format(
                    config.name, exception))
        # Special check for metadata files, if they exist the idea is to
        # replace the non existent constants with their default values
        # (the raw metadata stores constants as a list of
        # {"Name": ..., "DefaultValue": ...} dicts; flatten to a mapping).
        if (config.name.endswith("metadata.json") and tmp_json["Constants"]
                and type(tmp_json["Constants"]) is list):
            tmp_json["Constants"] = dict(
                map(
                    lambda constant: (
                        constant["Name"],
                        constant["DefaultValue"],
                    ),
                    tmp_json["Constants"],
                ))
        loaded_json.append(convert_dot_field_to_dict(tmp_json))

    modified_env_vars = OmegaConf.create()
    if database_config:
        modified_env_vars.update(database_config)
    # Copy whitelisted runtime/broker env vars into the config tree,
    # stripping their prefix via __curate_key.
    for prefix in [ENV_VAR_RUNTIME_PREFIX, ENV_VAR_BROKER_PREFIX]:
        env_vars = dict(
            filter(
                lambda key: key[0].startswith(prefix) and key[0] in whitelist,
                dict(os.environ).items(),
            ))
        for key, value in env_vars.items():
            new_key = __curate_key(key, prefix)
            OmegaConf.update(modified_env_vars, new_key, value)

    # Fetch and update any constants passed as env var
    const_env_vars = dict(
        filter(
            lambda key: key[0].startswith(CONSTANTS_ENV_VAR_PREFIX),
            dict(os.environ).items(),
        ))
    modified_constants = OmegaConf.create({"Constants": {}})
    for key, value in const_env_vars.items():
        new_key = key.replace(CONSTANTS_ENV_VAR_PREFIX, "", 1)
        # Only the FIRST underscore becomes a dot (module/constant split)
        new_key = new_key.replace("_", ".", 1)
        OmegaConf.update(modified_constants.Constants, new_key, value)

    parameters_replacement_dict = OmegaConf.create()
    for key, value in parameters_replacement:
        OmegaConf.update(parameters_replacement_dict, key, value)

    try:
        complete_conf = OmegaConf.merge(*loaded_json, modified_env_vars,
                                        modified_constants,
                                        parameters_replacement_dict)
        # Resolve the broker URL of the first published service into the
        # Kafka bootstrap-servers key.
        bootstrap_servers = get_value_for_constant(
            complete_conf,
            complete_conf.DataBrokerConfiguration.publishedServices[0].
            brokerUrl,
        )
        OmegaConf.update(complete_conf, BOOTSTRAP_SERVERS_KEY,
                         bootstrap_servers)
        if not OmegaConf.select(complete_conf, NODE_COUNT_KEY):
            complete_conf[NODE_COUNT_KEY] = 1
        __generate_source_topic_names(complete_conf)
        OmegaConf.update(
            complete_conf,
            "log_level",
            "DEBUG"
            if util.get_buildpack_loglevel() == logging.DEBUG else "INFO",
        )
        return complete_conf
    except Exception as exception:
        raise Exception(
            "Error while reading input config files. Reason: '{}'".format(
                exception))
)  # NOTE(review): closes an import statement that starts before this chunk
from buildpack.databroker.config_generator.scripts.utils import write_file

# Constants
BASE_URL = "/mx-buildpack/experimental/databroker/"
TAR_EXT = "tar"
BASE_DIR = "databroker"
AZKARRA_TPLY_CONF_NAME = "topology.conf"
PDR_STREAMS_FILENAME = "stream-sidecar"
DEFAULT_PDR_STREAMS_VERSION = "0.23.0-9"
PDR_STREAMS_DIR = os.path.join(BASE_DIR, "producer-streams")
PROCESS_NAME = "kafka-streams"
KAFKA_STREAMS_JMX_PORT = "11004"
LOCAL = ".local"
# Mirror the buildpack's log level for the sidecar process
LOG_LEVEL = ("DEBUG"
             if util.get_buildpack_loglevel() == logging.DEBUG else "INFO")


def get_pdr_stream_version():
    """Return the producer-streams sidecar version (env var override wins)."""
    return os.environ.get("DATABROKER_STREAMS_VERSION",
                          DEFAULT_PDR_STREAMS_VERSION)


# e.g. databroker/producer-streams/stream-sidecar-<version>
PDR_STREAMS_HOME = os.path.join(
    PDR_STREAMS_DIR,
    "{}-{}".format(PDR_STREAMS_FILENAME, get_pdr_stream_version()),
)
AZKARRA_CONF_PATH = os.path.join(os.getcwd(), LOCAL, PDR_STREAMS_HOME,
                                 "azkarra.conf")
PDR_STREAMS_JAR = os.path.join(
    os.getcwd(),
    # NOTE(review): this call continues beyond the end of this chunk