def create_tree_folder(self, tree_index, tree, generation_number, fitness,
                       persist_status, visualize_tree_status,
                       visualize_function_status):
    """
    Create the results folder for one candidate tree and optionally
    visualize it, persist its statistics, and plot its loss function.

    :param tree_index: index of the tree within its generation (used in the folder name)
    :param tree: the expression-tree candidate to record
    :param generation_number: generation this candidate belongs to
    :param fitness: fitness value of the tree (used in the folder name)
    :param persist_status: if truthy, persist the tree's statistics as JSON
    :param visualize_tree_status: if truthy, render the expression tree
    :param visualize_function_status: if truthy, plot the loss function
    """
    tree_path = f"{self.experiment_root_path}/Generation_{generation_number}/Candidates/Tree{tree_index}_{fitness}"
    os.mkdir(tree_path)
    # This code only works when put before the json_persistence.
    # When you put it after, it only visualizes
    if visualize_tree_status:
        try:
            Visualize.visualize([tree], self.experiment_id, tree_path)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not silently swallowed; visualization stays best-effort.
        except Exception:
            print("Failed to Visualize Expression Tree")
    if persist_status:
        try:
            tree_stats = Statistics.statistics(tree)
            json_persistor = JsonPersistor(tree_stats, "stats", tree_path)
            json_persistor.persist()
        except Exception:
            print("Failed to Persist Tree Stats Into a JSON File")
    if visualize_function_status:
        try:
            self.plot_loss(tree.symbolic_expression, tree_path)
        except Exception:
            print("Failed to Visualize Loss Function")
def persist_best_candidate(self, best_candidate, generation_idx,
                           persist_status, visualize_tree_status,
                           visualize_function_status):
    """
    Record the best candidate of a generation: optionally visualize it,
    persist its statistics, plot its loss, and append a summary row to
    the experiment-wide Best_Trees.csv file.

    :param best_candidate: the best expression tree of the generation
    :param generation_idx: index of the generation being recorded
    :param persist_status: if truthy, persist the tree's statistics as JSON
    :param visualize_tree_status: if truthy, render the expression tree
    :param visualize_function_status: if truthy, plot the loss function
    """
    best_candidate_path = f"{self.experiment_root_path}/Generation_{generation_idx}/Best_Candidate"
    os.mkdir(best_candidate_path)
    # This code only works when put before the json_persistence.
    # When you put it after, it only visualizes
    if visualize_tree_status:
        try:
            Visualize.visualize([best_candidate], self.experiment_id,
                                best_candidate_path)
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not silently swallowed; each step stays best-effort.
        except Exception:
            print("Failed to Visualize Expression Tree")
    if persist_status:
        try:
            tree_stats = Statistics.statistics(best_candidate)
            json_persistor = JsonPersistor(tree_stats, "stats",
                                           best_candidate_path)
            json_persistor.persist()
        except Exception:
            print("Failed to Persist Tree Stats Into a JSON File")
    if visualize_function_status:
        try:
            self.plot_loss(best_candidate.symbolic_expression,
                           best_candidate_path)
        except Exception:
            print("Failed to Visualize Loss Function")
    # set the file path and name for the csv file
    csv_file_name = f"{self.experiment_root_path}/Best_Trees.csv"
    # create an input list to write to the csv file
    csv_input = [
        str(generation_idx),
        best_candidate.generate_printable_expression(),
        str(best_candidate.fitness)
    ]
    # Mode 'a' creates the file when missing and appends otherwise, so the
    # previous exists()/'w+' branch was redundant. newline='' is the
    # documented way to open csv files (avoids blank rows on Windows), and
    # the `with` block closes the file — no explicit close() needed.
    with open(csv_file_name, 'a', newline='') as writeFile:
        writer = csv.writer(writeFile)
        writer.writerow(csv_input)
def persist(trees, population_obj):
    """
    Persist every candidate tree of a test run to disk.

    A fresh experiment directory is created under ./results, keyed by a
    UTC-epoch experiment id, and each tree gets its own sub-folder
    containing its statistics (JSON) and the pickled tree itself.

    :param trees: iterable of expression-tree candidates
    :param population_obj: population object (currently unused here)
    """
    experiment_id = calendar.timegm(time.gmtime())
    print(f"To refer to this test Experiment, the ID is: {experiment_id}")

    base_dir = f"{os.getcwd()}/results/glo_test_{experiment_id}/candidates"
    os.makedirs(base_dir)

    for index, candidate in enumerate(trees):
        tree_dir = f"{base_dir}/tree_{index}"
        os.makedirs(tree_dir)

        # Statistics go to <tree_dir>/stats.json ...
        candidate_stats = Statistics.statistics(candidate)
        JsonPersistor("stats", tree_dir).persist(candidate_stats)

        # ... and the tree object itself to <tree_dir>/tree.pkl
        PicklePersistor("tree", tree_dir).persist(candidate)
def __init__(self, config_path, project_name):
    """
    The constructor initializes the config from the config path.

    :param config_path: path to the JSON config file to restore
    :param project_name: name of the project this config belongs to
    """
    # Split the path into the folder and the extension-less file name
    # that JsonPersistor expects. (A dead no-op self-assignment of
    # config_path was removed here.)
    config_folder = os.path.dirname(config_path)
    config_name = os.path.splitext(os.path.basename(config_path))[0]
    json_restorer = JsonPersistor(None,
                                  base_file_name=config_name,
                                  folder=config_folder)
    self.config = json_restorer.restore()
    # Default the config type when the JSON does not declare one.
    self.config_type = self.config.get("type", "runnable")
    self.project_name = project_name
    self.config = self.extract_env()
    self.initialize_queue_default()
    self.assign_ports_to_configs()
def persist_essential_configs(queue, storage, persist_path):
    """
    Persists the queue_config and the storage_config for the worker as a JSON

    :param queue: dict with "config" (the queue config) and "filename" keys
    :param storage: dict with "config" (the storage config) and "filename" keys
    :param persist_path: folder to write both JSON files into
    :return:
    """
    # Imported locally to avoid a module-level dependency on the persistor.
    from servicecommon.persistor.local.json.json_persistor import JsonPersistor

    # Queue config first, then storage config — same order as before.
    for spec in (queue, storage):
        persistor = JsonPersistor(spec["config"], spec["filename"], persist_path)
        persistor.persist()
from queuingservices.sqs.subscriber import Subscriber
from queuingservices.sqs.queue_lifecycle import QueueLifecycle
from servicecommon.persistor.local.json.json_persistor import JsonPersistor


def create_queue(obj, queue_name):
    """Create a queue named *queue_name* through *obj* and return it."""
    return obj.create_queue(queue_name=queue_name)


if __name__ == "__main__":
    CREDENTIALS_PATH = "./creds/aws/sqs/"

    # Restore the AWS credentials persisted under CREDENTIALS_PATH.
    creds_restorer = JsonPersistor(dict=None,
                                   base_file_name='credentials',
                                   folder=CREDENTIALS_PATH)
    credentials = creds_restorer.restore()

    # Create the FIFO queue, then subscribe a worker to its URL.
    lifecycle = QueueLifecycle(credentials_dict=credentials)
    queue_url = lifecycle.create_queue('myqueue.fifo').url
    worker = Subscriber(credentials_dict=credentials, queue_url=queue_url)
    worker.start_server()
from queuingservices.managers.queue_subscriber_manager import QueueSubscriberManager from queuingservices.managers.queue_publisher_manager import QueuePublisherManager from queuingservices.managers.queue_lifecycle_manager import QueueLifecycleManager from servicecommon.persistor.local.json.json_persistor import JsonPersistor if __name__ == "__main__": try: # Get the queue config dictionaries rmq_restore = JsonPersistor(dict=None, base_file_name="rmq_queue_config", folder="creds/") sqs_restore = JsonPersistor(dict=None, base_file_name="sqs_queue_config", folder="creds/") rmq_queue_config = rmq_restore.restore() sqs_queue_config = sqs_restore.restore() # get the QueueOrchestrator objects by sending them the corresponding # queue config dictionaries rmq_sub_manage = QueueSubscriberManager(queue_config=rmq_queue_config) rmq_pub_manage = QueuePublisherManager(queue_config=rmq_queue_config) rmq_life_manage = QueueLifecycleManager(queue_config=rmq_queue_config) sqs_sub_manage = QueueSubscriberManager(queue_config=sqs_queue_config) sqs_pub_manage = QueuePublisherManager(queue_config=sqs_queue_config) sqs_life_manage = QueueLifecycleManager(queue_config=sqs_queue_config) # get the correct queue objects rmq_subscriber = rmq_sub_manage.build_subscribe_object() rmq_publisher = rmq_pub_manage.build_publisher_object() rmq_lifecycle = rmq_life_manage.build_lifecycle_object()
help='Path of JSON Describing the Queue config') parser.add_argument('--storage_config_path', help='Path of JSON Describing the Storage config') parser.add_argument('--project_name', help='Name of the Project.') args = parser.parse_args() project_name = args.project_name queue_config_path = args.queue_config_path queue_config_name = os.path.basename(queue_config_path) queue_config_name_without_ext = os.path.splitext(queue_config_name)[0] queue_config_folder = os.path.dirname(queue_config_path) storage_config_path = args.storage_config_path storage_config_name = os.path.basename(storage_config_path) storage_config_name_without_ext = os.path.splitext(storage_config_name)[0] storage_config_folder = os.path.dirname(storage_config_path) json_restorer = JsonPersistor(None, base_file_name=queue_config_name_without_ext, folder=queue_config_folder) queue_config = json_restorer.restore() json_restorer = JsonPersistor( None, base_file_name=storage_config_name_without_ext, folder=storage_config_folder) storage_config = json_restorer.restore() CompletionService(queue_config, storage_config)