def main(argv):
    """Drive one synthetic-workload test run.

    Parses the ``--config_json`` CLI option, validates and loads the workload
    description, generates the per-instance invocation timelines, and starts
    one invoker thread per workload instance.  Test metadata (epoch-ms start
    time, config path, event count) is appended to
    ``FAAS_ROOT/synthetic-workload-invoker/test_metadata.out``.

    Args:
        argv: Unused; options are read from sys.argv via OptionParser.

    Returns:
        True when the test was launched, False when the JSON config is
        missing or invalid.
    """
    import shlex  # local import: only needed to shell-quote untrusted paths

    logger.info("Workload Invoker started")
    print("Log file -> logs/SWI.log")

    parser = OptionParser()
    parser.add_option(
        "-c", "--config_json", dest="config_json",
        help="The input json config file describing the synthetic workload.",
        metavar="FILE")
    (options, args) = parser.parse_args()

    if not CheckJSONConfig(options.config_json):
        logger.error("Invalid or no JSON config file!")
        return False   # Abort the function if json file not valid
    workload = ReadJSONConfig(options.config_json)
    if not CheckWorkloadValidity(workload=workload,
                                 supported_distributions=supported_distributions):
        return False   # Abort the function if json file not valid

    [all_events, event_count] = GenericEventGenerator(workload)

    threads = []
    for (instance, instance_times) in all_events.items():
        action = workload['instances'][instance]['application']
        # 'param_file' is optional per instance; only a missing key means
        # "no params" (was a bare except that hid real errors).
        try:
            param_file = workload['instances'][instance]['param_file']
        except KeyError:
            param_file = None
        blocking_cli = workload['blocking_cli']
        if 'data_file' in workload['instances'][instance]:
            # Binary payload: a dedicated generator streams the data file.
            data_file = workload['instances'][instance]['data_file']
            threads.append(threading.Thread(
                target=BinaryDataHTTPInstanceGenerator,
                args=[action, instance_times, blocking_cli, data_file]))
        else:
            threads.append(threading.Thread(
                target=HTTPInstanceGenerator,
                args=[action, instance_times, blocking_cli, param_file]))

    # Dump Test Metadata.  These commands run through a shell, so quote the
    # user-supplied config path and the destination (injection / spaces).
    metadata_file = FAAS_ROOT + "/synthetic-workload-invoker/test_metadata.out"
    os.system("date +%s%N | cut -b1-13 > " + shlex.quote(metadata_file))
    os.system("echo " + shlex.quote(options.config_json) +
              " >> " + shlex.quote(metadata_file))
    os.system("echo " + str(event_count) + " >> " + shlex.quote(metadata_file))

    # Performance monitoring is optional: tolerate an absent/None
    # 'perf_monitoring' section, but do not swallow unrelated errors.
    try:
        if workload['perf_monitoring']['runtime_script']:
            runtime_script = 'bash ' + FAAS_ROOT + '/' + \
                workload['perf_monitoring']['runtime_script'] + \
                ' ' + str(int(workload['test_duration_in_seconds'])) + ' &'
            os.system(runtime_script)
            logger.info("Runtime monitoring script ran")
    except (KeyError, TypeError):
        pass

    logger.info("Test started")
    for thread in threads:
        thread.start()
    # Threads are started but not joined; "Test ended" marks launch
    # completion, not invocation completion.
    logger.info("Test ended")
    return True
def main(argv):
    """Drive one synthetic-workload test run with per-test logging.

    Parses CLI options (test name, rate/benchmark/param-file overrides, and
    the JSON config path), creates a per-test log directory, applies the
    overrides to the workload, generates per-instance invocation timelines,
    and starts one invoker thread per workload instance.  Test metadata is
    appended to ``<log_dir>/test_metadata.out`` under FAAS_ROOT.

    Args:
        argv: Unused; options are read from sys.argv via OptionParser.

    Returns:
        True when the test was launched, False when the JSON config is
        missing or invalid.
    """
    import shlex  # local import: only needed to shell-quote untrusted paths

    parser = OptionParser()
    parser.add_option("-n", "--test_name", dest="test_name",
                      default="latest_test", help="Name of test",
                      metavar="FILE")
    parser.add_option("-r", "--rate_override", dest="rate_override",
                      help="Override rate of invocation from arguments",
                      metavar="FILE")
    parser.add_option(
        "-c", "--config_json", dest="config_json",
        help="The input json config file describing the synthetic workload.",
        metavar="FILE")
    parser.add_option("-b", "--benchmark", dest="benchmark", metavar="FILE")
    parser.add_option("-p", "--param_file", dest="param_file", metavar="FILE")
    (options, args) = parser.parse_args()

    log_dir, log_file = createDir(options.test_name)
    logger = ScriptLogger('workload_invoker', log_file)
    logger.info("Workload Invoker started")
    print("Log file -> ", log_file, "\n")

    if not CheckJSONConfig(options.config_json):
        logger.error("Invalid or no JSON config file!")
        return False   # Abort the function if json file not valid
    workload = ReadJSONConfig(options.config_json)
    workload = ApplyJSONOverrides(workload, log_dir, options.rate_override,
                                  options.benchmark, options.param_file)
    if not CheckWorkloadValidity(workload=workload,
                                 supported_distributions=supported_distributions):
        return False   # Abort the function if json file not valid

    [all_events, event_count] = GenericEventGenerator(workload)

    threads = []
    for (instance, instance_times) in all_events.items():
        action = workload['instances'][instance]['application']
        # 'param_file' is optional per instance; only a missing key means
        # "no params" (was a bare except that hid real errors).
        try:
            param_file = workload['instances'][instance]['param_file']
        except KeyError:
            param_file = None
        blocking_cli = workload['blocking_cli']
        if 'data_file' in workload['instances'][instance]:
            # Binary payload: a dedicated generator streams the data file.
            data_file = workload['instances'][instance]['data_file']
            threads.append(threading.Thread(
                target=BinaryDataHTTPInstanceGenerator,
                args=[action, instance_times, blocking_cli, data_file]))
        else:
            threads.append(threading.Thread(
                target=HTTPInstanceGenerator,
                args=[action, instance_times, blocking_cli, log_dir,
                      param_file]))

    # Dump Test Metadata.  These commands run through a shell, so quote the
    # user-supplied config path and the destination (injection / spaces).
    metadata_file = log_dir + "/test_metadata.out"
    metadata_path = FAAS_ROOT + '/' + metadata_file
    os.system("date +%s%N | cut -b1-13 > " + shlex.quote(metadata_path))
    os.system("echo " + shlex.quote(options.config_json) +
              " >> " + shlex.quote(metadata_path))
    os.system("echo " + str(event_count) + " >> " + shlex.quote(metadata_path))

    # Performance monitoring is optional: tolerate an absent/None
    # 'perf_monitoring' section, but do not swallow unrelated errors.
    try:
        if workload['perf_monitoring']['runtime_script']:
            runtime_script = 'bash ' + FAAS_ROOT + '/' + \
                workload['perf_monitoring']['runtime_script'] + \
                ' ' + str(int(workload['test_duration_in_seconds'])) + \
                ' ' + FAAS_ROOT + '/' + log_dir + '/perf-mon.out' + ' &'
            logger.info(runtime_script)
            os.system(runtime_script)
            logger.info("Runtime monitoring script ran")
    except (KeyError, TypeError):
        pass

    logger.info("Test started")
    for thread in threads:
        thread.start()
    # Threads are started but not joined; "Test ended" marks launch
    # completion, not invocation completion.
    logger.info("Test ended")
    return True