env_vars = {'in_topic': os.getenv('IN_TOPIC'), 'in_group': os.getenv('IN_GROUP'), 'in_schema_file': os.getenv('IN_SCHEMA_FILE'), 'out_topic': os.getenv('OUT_TOPIC'), 'out_schema_file': os.getenv('OUT_SCHEMA_FILE')} env_vars = {'in_topic': {'DB_raw_data': './schema/data.avsc'}, 'in_group': 'monitoring', 'in_schema_file': './schema/data.avsc', 'out_topic': 'AB_monitoring', 'out_schema_file': './schema/monitoring.avsc', 'config_path': 'config.yaml'} """ new_pc = KafkaPC(**env_vars) for msg in new_pc.consumer: """ "name": "Data", "fields": [ {"name": "phase", "type": ["string"]}, {"name": "id_x", "type": ["int"]}, {"name": "x", "type": ["float"]}, {"name": "y", "type": ["float"]} ] """ new_data = new_pc.decode_avro_msg(msg) """
import os

import requests

from classes.KafkaPC import KafkaPC

# Bridge component: consumes "New X" messages from Kafka and forwards each
# value to the CPPS controller's REST API via HTTP PUT.

env_vars = {'config_path': os.getenv('config_path'),
            'config_section': os.getenv('config_section')}
new_pc = KafkaPC(**env_vars)

API_URL = new_pc.config['API_URL']
ENDPOINT = "/production_parameter/x"
URL = API_URL + ENDPOINT

for msg in new_pc.consumer:
    # Incoming Avro schema, kept for reference:
    """
    "name": "New X",
    "fields": [
        {"name": "new_x", "type": ["float"]}
    ]
    """
    new_message = new_pc.decode_avro_msg(msg)

    # defining a params dict for the parameters to be sent to the API
    params = {"value": new_message['new_x']}

    # sending a PUT request; failures must not kill the consumer loop,
    # so they are logged and the loop continues (best-effort forwarding)
    print(f"Send x={round(new_message['new_x'], 3)} to the CPPS Controller")
    try:
        # timeout keeps the loop from hanging forever if the controller
        # is unreachable
        r = requests.put(url=URL, params=params, timeout=10)
        if not r.ok:
            print(f"Controller returned HTTP {r.status_code} for x-update")
    except requests.exceptions.RequestException as e:
        print(f"Failed to send x to the CPPS Controller: {e}")
"y": { "new_x": msgdata["new_x"], "algorithm": splitData[0] }, } new_c.send_msg(new_data_point) print("message sent") env_vars = { "config_path": os.getenv("config_path"), "config_section": os.getenv("config_section"), } new_c = KafkaPC(**env_vars) api_dict = new_c.config["API_OUT"] plot_dict = new_c.config["PLOT_TOPIC"] API_URL = new_c.config["API_URL"] ENDPOINT = new_c.config["API_ENDPOINT"] for msg in new_c.consumer: # tests if msg.topic is in api_dict and calls function from dict try: if api_dict.get(msg.topic) is not None: eval(api_dict[msg.topic])(msg) except Exception as e: print( f"Processing Topic: {msg.topic} with Function: {api_dict[msg.topic]}\n Error: {e}"
    # (fragment) End of a function whose definition lies above this chunk;
    # presumably fetches model parameters from the API — confirm against the
    # full file.
    return MODEL_PARAMETERS


# Script setup: config locations come from the environment.
env_vars = {
    'config_path': os.getenv('config_path'),
    'config_section': os.getenv('config_section')
}
# Hard-coded local-testing configuration, kept commented out.
"""
env_vars = {'in_topic': {'DB_raw_data': './schema/data.avsc'}, 'in_group': 'kriging', 'in_schema_file': './schema/data.avsc', 'out_topic': 'AB_model_data', 'out_schema_file': './schema/model.avsc'}
"""
new_pc = KafkaPC(**env_vars)

MODEL_ALGORITHM = new_pc.config['MODEL_ALGORITHM']
API_URL = new_pc.config['API_URL']
MODEL_PARAMETERS = get_model_parameters(API_URL)

# Sliding window over incoming data; modelling starts once it holds at
# least MIN_DATA_POINTS entries (used below this chunk, presumably).
new_window = DataWindow()
MIN_DATA_POINTS = 5

for msg in new_pc.consumer:
    # Incoming Avro "Data" schema; the docstring continues past this chunk.
    """
    "name": "Data",
    "fields": [
        {"name": "phase", "type": ["string"]},
        {"name": "id_x", "type": ["int"]},
import requests
import json
import time
import os

from classes.KafkaPC import KafkaPC
from classes.caai_util import ObjectiveFunction

# Simulation driver: fits a ground-truth objective function from CSV data
# and then emits production cycles (loop body continues past this chunk).

# Config locations come from the environment; KafkaPC parses the YAML config.
env_vars = {
    'config_path': os.getenv('config_path'),
    'config_section': os.getenv('config_section')
}
new_pc = KafkaPC(**env_vars)

# Fit the surrogate ground truth from the configured CSV columns.
new_objective = ObjectiveFunction()
new_objective.load_data(data_path=new_pc.config['data_path'],
                        x_columns=new_pc.config['x_columns'],
                        y_columns=new_pc.config['y_columns'])
new_objective.fit_model()

# N_INITIAL_DESIGN: cycles produced before switching out of the initial
# phase; MAX_PRODUCTION_CYCLES: total cycles to simulate.
N_INITIAL_DESIGN = new_pc.config['N_INITIAL_DESIGN']
MAX_PRODUCTION_CYCLES = new_pc.config['MAX_PRODUCTION_CYCLES']

phase = 'init'
current_data_point = 0

# Fixed delay — presumably gives the other pipeline components time to come
# up before producing; confirm.
time.sleep(5)

while current_data_point < MAX_PRODUCTION_CYCLES:
    # Phase switch at the end of the initial design (body lies past this chunk).
    if current_data_point == N_INITIAL_DESIGN: